var/home/core/zuul-output/logs/kubelet.log
Sep 29 13:44:28 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 29 13:44:28 crc restorecon[4583]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 13:44:28 crc restorecon[4583]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc 
restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 13:44:28 crc 
restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 
13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 13:44:28 crc 
restorecon[4583]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 13:44:28 crc restorecon[4583]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 13:44:28 crc restorecon[4583]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 13:44:29 crc restorecon[4583]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 13:44:29 crc kubenswrapper[4634]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 13:44:29 crc kubenswrapper[4634]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 13:44:29 crc kubenswrapper[4634]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 13:44:29 crc kubenswrapper[4634]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 13:44:29 crc kubenswrapper[4634]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Sep 29 13:44:29 crc kubenswrapper[4634]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.860574 4634 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865812 4634 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865851 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfig Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865863 4634 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865872 4634 feature_gate.go:330] unrecognized feature gate: Example Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865881 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865889 4634 feature_gate.go:330] unrecognized feature gate: GatewayAPI Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865898 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865908 4634 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865917 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865926 4634 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865934 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865945 4634 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865955 4634 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865964 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865972 4634 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865980 4634 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865988 4634 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.865996 4634 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866006 4634 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866017 4634 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866027 4634 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866035 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866046 4634 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866058 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866069 4634 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866109 4634 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866118 4634 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866125 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866145 4634 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866153 4634 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866161 4634 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866169 4634 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866177 4634 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866184 4634 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866192 4634 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866201 4634 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866208 4634 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866216 4634 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866224 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866231 4634 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866239 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866246 4634 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866254 4634 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866262 4634 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866269 4634 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866276 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866284 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866292 4634 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866300 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866308 4634 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866315 4634 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866323 4634 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866331 4634 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866338 4634 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866346 4634 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866353 4634 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866366 4634 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866376 4634 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866384 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866391 4634 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866399 4634 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866407 4634 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866414 4634 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866422 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866440 4634 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866448 4634 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866455 4634 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866463 4634 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866471 4634 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866478 4634 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.866486 4634 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867606 4634 flags.go:64] FLAG: --address="0.0.0.0"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867639 4634 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867663 4634 flags.go:64] FLAG: --anonymous-auth="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867673 4634 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867685 4634 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867695 4634 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867707 4634 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867718 4634 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867727 4634 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867736 4634 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867746 4634 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867755 4634 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867765 4634 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867774 4634 flags.go:64] FLAG: --cgroup-root=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867783 4634 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867793 4634 flags.go:64] FLAG: --client-ca-file=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867802 4634 flags.go:64] FLAG: --cloud-config=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867811 4634 flags.go:64] FLAG: --cloud-provider=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867819 4634 flags.go:64] FLAG: --cluster-dns="[]"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867834 4634 flags.go:64] FLAG: --cluster-domain=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867842 4634 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867852 4634 flags.go:64] FLAG: --config-dir=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867864 4634 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867876 4634 flags.go:64] FLAG: --container-log-max-files="5"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867890 4634 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867901 4634 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867914 4634 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867925 4634 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867934 4634 flags.go:64] FLAG: --contention-profiling="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867945 4634 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867954 4634 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867963 4634 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867973 4634 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867984 4634 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.867994 4634 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868006 4634 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868016 4634 flags.go:64] FLAG: --enable-load-reader="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868028 4634 flags.go:64] FLAG: --enable-server="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868041 4634 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868056 4634 flags.go:64] FLAG: --event-burst="100"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868068 4634 flags.go:64] FLAG: --event-qps="50"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868108 4634 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868118 4634 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868128 4634 flags.go:64] FLAG: --eviction-hard=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868140 4634 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868149 4634 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868158 4634 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868168 4634 flags.go:64] FLAG: --eviction-soft=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868177 4634 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868186 4634 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868195 4634 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868203 4634 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868213 4634 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868222 4634 flags.go:64] FLAG: --fail-swap-on="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868230 4634 flags.go:64] FLAG: --feature-gates=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868241 4634 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868250 4634 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868260 4634 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868269 4634 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868279 4634 flags.go:64] FLAG: --healthz-port="10248"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868288 4634 flags.go:64] FLAG: --help="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868297 4634 flags.go:64] FLAG: --hostname-override=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868306 4634 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868315 4634 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868324 4634 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868335 4634 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868343 4634 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868352 4634 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868361 4634 flags.go:64] FLAG: --image-service-endpoint=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868370 4634 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868380 4634 flags.go:64] FLAG: --kube-api-burst="100"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868389 4634 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868401 4634 flags.go:64] FLAG: --kube-api-qps="50"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868410 4634 flags.go:64] FLAG: --kube-reserved=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868419 4634 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868428 4634 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868437 4634 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868446 4634 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868455 4634 flags.go:64] FLAG: --lock-file=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868464 4634 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868474 4634 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868482 4634 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868525 4634 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868534 4634 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868544 4634 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868553 4634 flags.go:64] FLAG: --logging-format="text"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868562 4634 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868572 4634 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868580 4634 flags.go:64] FLAG: --manifest-url=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868589 4634 flags.go:64] FLAG: --manifest-url-header=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868602 4634 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868612 4634 flags.go:64] FLAG: --max-open-files="1000000"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868623 4634 flags.go:64] FLAG: --max-pods="110"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868632 4634 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868641 4634 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868650 4634 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868659 4634 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868668 4634 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868678 4634 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868687 4634 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868707 4634 flags.go:64] FLAG: --node-status-max-images="50"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868717 4634 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868727 4634 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868736 4634 flags.go:64] FLAG: --pod-cidr=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868745 4634 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868759 4634 flags.go:64] FLAG: --pod-manifest-path=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868768 4634 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868777 4634 flags.go:64] FLAG: --pods-per-core="0"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868786 4634 flags.go:64] FLAG: --port="10250"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868796 4634 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868805 4634 flags.go:64] FLAG: --provider-id=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868814 4634 flags.go:64] FLAG: --qos-reserved=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868823 4634 flags.go:64] FLAG: --read-only-port="10255"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868832 4634 flags.go:64] FLAG: --register-node="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868841 4634 flags.go:64] FLAG: --register-schedulable="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868850 4634 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868872 4634 flags.go:64] FLAG: --registry-burst="10"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868881 4634 flags.go:64] FLAG: --registry-qps="5"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868890 4634 flags.go:64] FLAG: --reserved-cpus=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868899 4634 flags.go:64] FLAG: --reserved-memory=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868910 4634 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868919 4634 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868928 4634 flags.go:64] FLAG: --rotate-certificates="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868937 4634 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868946 4634 flags.go:64] FLAG: --runonce="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868956 4634 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868965 4634 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868974 4634 flags.go:64] FLAG: --seccomp-default="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868983 4634 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.868992 4634 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869002 4634 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869012 4634 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869021 4634 flags.go:64] FLAG: --storage-driver-password="root"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869030 4634 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869040 4634 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869050 4634 flags.go:64] FLAG: --storage-driver-user="root"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869061 4634 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869072 4634 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869125 4634 flags.go:64] FLAG: --system-cgroups=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869135 4634 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869149 4634 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869159 4634 flags.go:64] FLAG: --tls-cert-file=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869167 4634 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869179 4634 flags.go:64] FLAG: --tls-min-version=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869188 4634 flags.go:64] FLAG: --tls-private-key-file=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869197 4634 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869207 4634 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869216 4634 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869224 4634 flags.go:64] FLAG: --v="2"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869235 4634 flags.go:64] FLAG: --version="false"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869247 4634 flags.go:64] FLAG: --vmodule=""
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869257 4634 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.869267 4634 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869488 4634 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869498 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869507 4634 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869517 4634 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869525 4634 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869533 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869541 4634 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869549 4634 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869558 4634 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869566 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869574 4634 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869582 4634 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869589 4634 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869597 4634 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869609 4634 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869619 4634 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869629 4634 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869637 4634 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869646 4634 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869655 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869665 4634 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869673 4634 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869682 4634 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869690 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869698 4634 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869706 4634 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869713 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869721 4634 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869728 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869736 4634 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869745 4634 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869752 4634 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869760 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869767 4634 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869774 4634 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869783 4634 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869792 4634 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869801 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869809 4634 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869817 4634 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869825 4634 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869834 4634 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869842 4634 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869850 4634 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869858 4634 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869865 4634 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869873 4634 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869881 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869889 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869897 4634 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869907 4634 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869917 4634 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869926 4634 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869935 4634 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869943 4634 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869952 4634 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869961 4634 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869969 4634 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869977 4634 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869985 4634 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.869992 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870001 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870009 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870019 4634 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870028 4634 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870036 4634 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870044 4634 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870053 4634 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870062 4634 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870070 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.870105 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.871198 4634 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.885477 4634 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.885511 4634 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885601 4634 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885609 4634 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885613 4634 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885618 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885622 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885626 4634 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885630 4634 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885634 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885638 4634 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885642 4634 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885646 4634 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885650 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885653 4634 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885657 4634 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885661 4634 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885664 4634 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885668 4634 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885672 4634 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885675 4634 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885679 4634 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885683 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885686 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885690 4634 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885693 4634 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885697 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885700 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885704 4634 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885707 4634 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885711 4634 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885715 4634 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885719 4634 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885723 4634 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885727 4634 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885730 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885734 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885738 4634 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885741 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885745 4634 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885750 4634 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885755 4634 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885759 4634 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885763 4634 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885766 4634 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885769 4634 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885773 4634 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885777 4634 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885782 4634 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885785 4634 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885789 4634 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885792 4634 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885796 4634 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885799 4634 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885803 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885806 4634 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885809 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885813 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885817 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885820 4634 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885824 4634 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885828 4634 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885831 4634 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885835 4634 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885838 4634 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885843 4634 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885848 4634 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885855 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885859 4634 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885863 4634 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885867 4634 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885872 4634 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.885876 4634 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.885884 4634 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886054 4634 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886062 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886067 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886070 4634 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886075 4634 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886095 4634 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886100 4634 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886106 4634 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886110 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886114 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886118 4634 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886122 4634 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886125 4634 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886129 4634 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886133 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886137 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886140 4634 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886144 4634 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886147 4634 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886151 4634 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886155 4634 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886158 4634 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886162 4634 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886166 4634 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886169 4634 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886173 4634 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886176 4634 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886180 4634 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886184 4634 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886187 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886191 4634 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886195 4634 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886199 4634 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886202 4634 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886206 4634 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886211 4634 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886215 4634 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886222 4634 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886227 4634 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886231 4634 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886236 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886240 4634 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886243 4634 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886247 4634 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886252 4634 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886256 4634 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886260 4634 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886264 4634 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886267 4634 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886271 4634 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886274 4634 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886278 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886281 4634 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886284 4634 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886288 4634 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886291 4634 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886295 4634 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886298 4634 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886302 4634 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886305 4634 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886309 4634 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886312 4634 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886316 4634 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886319 4634 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886322 4634 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886326 4634 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886329 4634 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886333 4634 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886336 4634 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886339 4634 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 13:44:29 crc kubenswrapper[4634]: W0929 13:44:29.886343 4634 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.886349 4634 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.886501 4634 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.889741 4634 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.889998 4634 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.891218 4634 server.go:997] "Starting client certificate rotation"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.891236 4634 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.891455 4634 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-12 06:41:25.495255198 +0000 UTC
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.891584 4634 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2512h56m55.603678331s for next certificate rotation
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.913578 4634 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.918370 4634 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.937799 4634 log.go:25] "Validated CRI v1 runtime API"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.972959 4634 log.go:25] "Validated CRI v1 image API"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.974813 4634 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.982470 4634 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-11-59-36-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 29 13:44:29 crc kubenswrapper[4634]: I0929 13:44:29.982551 4634 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.008398 4634 manager.go:217] Machine: {Timestamp:2025-09-29 13:44:30.004133691 +0000 UTC m=+0.572861530 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199484928 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:dfa74544-018f-4337-a1a0-0c08b95c16d0 BootID:28d18494-3e65-4b8e-b583-09026bdf9b9b Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076109 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599742464 Type:vfs Inodes:3076109 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:e9:f8:77 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:e9:f8:77 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:4e:ca:53 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:2b:d0:ac Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:3a:f5:88 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:24:86:90 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:52:c9:79:c4:e7:bf Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:b2:0a:84:84:eb:b6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199484928 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.008806 4634 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.009025 4634 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.010998 4634 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.011323 4634 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.011387 4634 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.011725 4634 topology_manager.go:138] "Creating topology manager with none policy"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.011745 4634 container_manager_linux.go:303] "Creating device plugin manager"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.012661 4634 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.012720 4634 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.012971 4634 state_mem.go:36] "Initialized new in-memory state store"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.013127 4634 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.017372 4634 kubelet.go:418] "Attempting to sync node with API server"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.017406 4634 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.017445 4634 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.017465 4634 kubelet.go:324] "Adding apiserver pod source"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.017483 4634 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.022504 4634 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.023841 4634 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.026875 4634 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.026918 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.026933 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.027174 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError"
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.027181 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029247 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029294 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029311 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029325 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029347 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029361 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029374 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029396 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029411 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029425 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029446 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.029466 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.030947 4634 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.031799 4634 server.go:1280] "Started kubelet"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.033201 4634 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.033293 4634 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.033833 4634 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 29 13:44:30 crc systemd[1]: Started Kubernetes Kubelet.
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.034995 4634 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.035037 4634 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.035505 4634 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 14:50:14.599353815 +0000 UTC
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.035561 4634 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2233h5m44.563797348s for next certificate rotation
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.035751 4634 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.035771 4634 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.035826 4634 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.036597 4634 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.036802 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.036890 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError"
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.036968 4634 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.037300 4634 factory.go:55] Registering systemd factory
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.037326 4634 factory.go:221] Registration of the systemd container factory successfully
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.037992 4634 server.go:460] "Adding debug handlers to kubelet server"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.044472 4634 factory.go:153] Registering CRI-O factory
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.044555 4634 factory.go:221] Registration of the crio container factory successfully
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.044730 4634 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.044778 4634 factory.go:103] Registering Raw factory
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.044813 4634 manager.go:1196] Started watching for new ooms in manager
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.046497 4634 manager.go:319] Starting recovery of all containers
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.052477 4634 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.93:6443: connect: connection refused" interval="200ms"
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.045852 4634 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.129.56.93:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869c4c2b6a3637f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 13:44:30.031750015 +0000 UTC m=+0.600477794,LastTimestamp:2025-09-29 13:44:30.031750015 +0000 UTC m=+0.600477794,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058010 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058115 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058139 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058160 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058181 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058198 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058215 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058233 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058254 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058271 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058289 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058306 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058324 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058347 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058367 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058386 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058405 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058423 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058442 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058460 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058477 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058494 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058510 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058530 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058548 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058566 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058593 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058612 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058632 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058650 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058669 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058686 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058714 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058732 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058750 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058767 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058785 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058804 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058823 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058840 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058885 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058902 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058919 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058939 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058957 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058976 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.058995 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059013 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059030 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059047 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059125 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059146 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059171 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059191 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059215 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059261 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059299 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059324 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059348 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059369 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059387 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059403 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059422 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059440 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059457 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059473 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059490 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059507 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059525 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059541 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059560 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059577 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059595 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059613 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059631 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059687 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059704 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059723 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059764 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059813 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059844 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059863 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059881 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059899 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059917 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059935 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059952 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059969 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.059985 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060003 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060020 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060039 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060056 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060073 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060136 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060153 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060169 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060189 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060206 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060223 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060241 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060257 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060275 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060296 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060321 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060343 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060364 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060384 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060405 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060422 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060442 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060460 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060478 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060498 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060516 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060535 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060555 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060573 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060592 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060608 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060626 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060643 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060701 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060720 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060737 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060760 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060786 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060808 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060831 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.060857 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065412 4634 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065459 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065529 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065550 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065570 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065589 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065612 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065632 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065700 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065720 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065741 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065763 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065783 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065804 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065869 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065889 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065908 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065929 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065949 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.065969 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066026 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066048 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066071 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066126 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066151 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066236 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066265 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066288 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066311 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066330 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066399 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066421 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066441 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066462 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066482 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066501 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066564 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066587 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066609 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066640 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066659 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066677 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066807 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066851 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066889 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066910 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066975 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.066997 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067016 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067035 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067058 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067078 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067181 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067208 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067230 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067249 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067335 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067365 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067387 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067405 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067528 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067598 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067620 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067651 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067670 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.067689 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.068168 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.068200 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.068972 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069006 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069027 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069047 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069066 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069118 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069139 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069163 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069182 4634 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069201 4634 reconstruct.go:97] "Volume reconstruction finished" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.069215 4634 reconciler.go:26] "Reconciler: start to sync state" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.074907 4634 manager.go:324] Recovery completed Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.084258 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.085480 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.085505 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.085514 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.086557 4634 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.086581 4634 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.086604 4634 state_mem.go:36] "Initialized new in-memory state store" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.103282 4634 
policy_none.go:49] "None policy: Start" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.104552 4634 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.104593 4634 state_mem.go:35] "Initializing new in-memory state store" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.105952 4634 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.108875 4634 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.108906 4634 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.108933 4634 kubelet.go:2335] "Starting kubelet main sync loop" Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.108977 4634 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.111448 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.111510 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError" Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.137324 4634 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.159303 4634 manager.go:334] "Starting Device Plugin manager" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.159565 4634 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.159609 4634 server.go:79] "Starting device plugin registration server" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.160059 4634 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.160077 4634 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.160313 4634 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.160434 4634 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.160446 4634 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.170474 4634 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.210009 4634 kubelet.go:2421] "SyncLoop ADD" source="file" 
pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.210159 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.211336 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.211383 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.211395 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.211598 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.214469 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.214610 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.214783 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.214822 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.214841 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.215071 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.215239 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.215287 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216601 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216624 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216639 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216722 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216734 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216796 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216868 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216739 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216964 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.216982 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.217272 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.217301 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.217639 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.217672 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.217684 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.217819 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.217963 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.218017 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219021 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219060 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219075 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219155 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219186 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219203 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219026 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219247 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219265 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219397 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.219443 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.220460 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.220499 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.220517 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.253178 4634 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.93:6443: connect: connection refused" interval="400ms" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.260924 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.261916 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.261952 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.261965 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.261991 4634 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.262398 4634 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.93:6443: connect: connection refused" node="crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271553 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271608 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271668 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271720 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271766 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271812 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271860 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271883 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271925 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271955 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.271979 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.272012 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.272033 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.272076 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.272131 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372759 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372800 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372819 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372836 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372852 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372871 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372885 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372899 4634 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372915 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372931 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372946 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.372961 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373006 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373027 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373045 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373036 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373072 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 
13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373127 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373169 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373242 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373261 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373253 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373331 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373302 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373370 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373338 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373357 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373373 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373424 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.373304 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.463374 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.465692 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.465732 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.465741 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.465763 4634 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.466157 4634 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.93:6443: connect: connection refused" node="crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.550764 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.568921 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.575994 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.589349 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.593812 4634 util.go:30] "No sandbox for pod can be found. 
Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.602717 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-2734ed4327f431c28cd8baf17783fe74cffc6f0c2d679fb21a9e41e200adb5e3 WatchSource:0}: Error finding container 2734ed4327f431c28cd8baf17783fe74cffc6f0c2d679fb21a9e41e200adb5e3: Status 404 returned error can't find the container with id 2734ed4327f431c28cd8baf17783fe74cffc6f0c2d679fb21a9e41e200adb5e3
Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.605436 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-aff96ac71aebe343b3e46e5a73bef0ffcdc42a6d420c2d67f09f31feaad88bf7 WatchSource:0}: Error finding container aff96ac71aebe343b3e46e5a73bef0ffcdc42a6d420c2d67f09f31feaad88bf7: Status 404 returned error can't find the container with id aff96ac71aebe343b3e46e5a73bef0ffcdc42a6d420c2d67f09f31feaad88bf7
Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.611868 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-524d8e2364ba6173b969f1f27518f4ca8514c4b23d80b1333acc19cc81f4ea9f WatchSource:0}: Error finding container 524d8e2364ba6173b969f1f27518f4ca8514c4b23d80b1333acc19cc81f4ea9f: Status 404 returned error can't find the container with id 524d8e2364ba6173b969f1f27518f4ca8514c4b23d80b1333acc19cc81f4ea9f
Sep 29 13:44:30 crc kubenswrapper[4634]: W0929 13:44:30.617263 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-13b429dd67450a1fe2297a5074ddb9a94c7e06bc8db6d36d6407481301f4340c WatchSource:0}: Error finding container 13b429dd67450a1fe2297a5074ddb9a94c7e06bc8db6d36d6407481301f4340c: Status 404 returned error can't find the container with id 13b429dd67450a1fe2297a5074ddb9a94c7e06bc8db6d36d6407481301f4340c
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.654314 4634 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.93:6443: connect: connection refused" interval="800ms"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.866416 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.867604 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.867665 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.867680 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:30 crc kubenswrapper[4634]: I0929 13:44:30.867711 4634 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 13:44:30 crc kubenswrapper[4634]: E0929 13:44:30.868446 4634 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.93:6443: connect: connection refused" node="crc"
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.93:6443: connect: connection refused" node="crc" Sep 29 13:44:31 crc kubenswrapper[4634]: W0929 13:44:31.025921 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused Sep 29 13:44:31 crc kubenswrapper[4634]: E0929 13:44:31.026007 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError" Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.037867 4634 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused Sep 29 13:44:31 crc kubenswrapper[4634]: W0929 13:44:31.086769 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused Sep 29 13:44:31 crc kubenswrapper[4634]: E0929 13:44:31.086843 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError" Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.115897 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2734ed4327f431c28cd8baf17783fe74cffc6f0c2d679fb21a9e41e200adb5e3"} Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.117692 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"13b429dd67450a1fe2297a5074ddb9a94c7e06bc8db6d36d6407481301f4340c"} Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.118596 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3b4e0dc17f4ddc3f8dbd8fbe299de149f0f5a034d0359b0fe7810ee1b6c2c9a4"} Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.119466 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"524d8e2364ba6173b969f1f27518f4ca8514c4b23d80b1333acc19cc81f4ea9f"} Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.120290 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"aff96ac71aebe343b3e46e5a73bef0ffcdc42a6d420c2d67f09f31feaad88bf7"} Sep 29 13:44:31 crc kubenswrapper[4634]: W0929 13:44:31.298469 
Sep 29 13:44:31 crc kubenswrapper[4634]: E0929 13:44:31.298538 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError"
Sep 29 13:44:31 crc kubenswrapper[4634]: W0929 13:44:31.386227 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:31 crc kubenswrapper[4634]: E0929 13:44:31.386316 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError"
Sep 29 13:44:31 crc kubenswrapper[4634]: E0929 13:44:31.455178 4634 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.93:6443: connect: connection refused" interval="1.6s"
Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.669139 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.670686 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.670773 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.670790 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:31 crc kubenswrapper[4634]: I0929 13:44:31.670834 4634 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Sep 29 13:44:31 crc kubenswrapper[4634]: E0929 13:44:31.671605 4634 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.93:6443: connect: connection refused" node="crc"
Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.037192 4634 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.125958 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf"}
Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.126021 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853"}
pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853"} Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.126037 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2"} Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.126052 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc"} Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.126204 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.127222 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.127252 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.127261 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.128768 4634 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="8254d5dd4ee442314c5186a6bdd483ded7d14b9426cbbd0e8a659b9a0aad77eb" exitCode=0 Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.128834 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.128844 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"8254d5dd4ee442314c5186a6bdd483ded7d14b9426cbbd0e8a659b9a0aad77eb"} Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.129520 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.129538 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.129547 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.132165 4634 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da" exitCode=0 Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.132214 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.132253 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da"} Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.132964 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.132987 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.132996 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.136372 4634 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64" exitCode=0 Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.136480 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64"} Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.136730 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.137796 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.137824 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.137844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.140210 4634 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290" exitCode=0 Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.140244 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290"} Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.140330 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.140916 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.140930 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.140937 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.142429 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.143021 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.143068 4634 
Sep 29 13:44:32 crc kubenswrapper[4634]: I0929 13:44:32.143097 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:32 crc kubenswrapper[4634]: W0929 13:44:32.837638 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:32 crc kubenswrapper[4634]: E0929 13:44:32.837944 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError"
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.038211 4634 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused
Sep 29 13:44:33 crc kubenswrapper[4634]: E0929 13:44:33.056756 4634 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.129.56.93:6443: connect: connection refused" interval="3.2s"
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.143786 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e67ad0abc7b3556e2564905808247dbc4049bc99621abc89d120f641b6b351aa"}
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.143904 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.145056 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.145093 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.145102 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.147286 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5"}
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.147312 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee"}
Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.147326 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c"}
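[Annotation] The lease-renewal failures above advertise a doubling retry interval: interval="800ms" at 13:44:30, "1.6s" at 13:44:31, "3.2s" at 13:44:33 (and "6.4s" later in this log), which is consistent with exponential backoff. A minimal Go sketch of that doubling pattern, for illustration only (the kubelet's actual backoff parameters are not shown in this log):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Starting interval copied from the first lease error above.
        interval := 800 * time.Millisecond
        for attempt := 1; attempt <= 4; attempt++ {
            fmt.Printf("attempt %d: retry after %v\n", attempt, interval)
            interval *= 2 // 800ms -> 1.6s -> 3.2s -> 6.4s, matching the log
        }
    }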
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c"} Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.147428 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.148289 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.148307 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.148316 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.149527 4634 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f" exitCode=0 Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.149566 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f"} Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.149830 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.153674 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.153716 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.153739 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.157616 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"efd5057175ec498248eecfc4a29be83c79023856866821ddcc51d79b3ef61156"} Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.157659 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1"} Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.157676 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90"} Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.157686 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.157712 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.157687 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665"} Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.157803 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7"} Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.158513 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.158540 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.158551 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.158525 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.158611 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.158621 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.271910 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.273458 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.273491 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.273504 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:33 crc kubenswrapper[4634]: I0929 13:44:33.273529 4634 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 13:44:33 crc kubenswrapper[4634]: E0929 13:44:33.274161 4634 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.129.56.93:6443: connect: connection refused" node="crc" Sep 29 13:44:33 crc kubenswrapper[4634]: W0929 13:44:33.544154 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused Sep 29 13:44:33 crc kubenswrapper[4634]: E0929 13:44:33.544241 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError" Sep 29 13:44:33 crc kubenswrapper[4634]: W0929 13:44:33.661364 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get 
"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.129.56.93:6443: connect: connection refused Sep 29 13:44:33 crc kubenswrapper[4634]: E0929 13:44:33.661465 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.129.56.93:6443: connect: connection refused" logger="UnhandledError" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.163211 4634 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c" exitCode=0 Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.163397 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.163300 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c"} Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.163474 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.163502 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.163530 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.164663 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.164715 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165063 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165109 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165119 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165167 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165201 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165222 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165672 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165692 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.165700 4634 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.166215 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.166232 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:34 crc kubenswrapper[4634]: I0929 13:44:34.166242 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:35 crc kubenswrapper[4634]: I0929 13:44:35.171890 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134"} Sep 29 13:44:35 crc kubenswrapper[4634]: I0929 13:44:35.172325 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062"} Sep 29 13:44:35 crc kubenswrapper[4634]: I0929 13:44:35.172432 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275"} Sep 29 13:44:35 crc kubenswrapper[4634]: I0929 13:44:35.172494 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09"} Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.124844 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.125167 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.125226 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.126958 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.127053 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.127075 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.182410 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba"} Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.182921 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.184905 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.185011 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.185039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.415545 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.415844 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.418295 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.418341 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.418355 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.475080 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.476706 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.476767 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.476787 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.476825 4634 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.961780 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.961971 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.962018 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.963256 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.963292 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:36 crc kubenswrapper[4634]: I0929 13:44:36.963301 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:37 crc kubenswrapper[4634]: I0929 13:44:37.186701 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:37 crc kubenswrapper[4634]: I0929 13:44:37.188069 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:37 crc kubenswrapper[4634]: I0929 13:44:37.188161 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:37 crc kubenswrapper[4634]: I0929 13:44:37.188191 4634 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 13:44:38 crc kubenswrapper[4634]: I0929 13:44:38.992278 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:38 crc kubenswrapper[4634]: I0929 13:44:38.992489 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:38 crc kubenswrapper[4634]: I0929 13:44:38.993648 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:38 crc kubenswrapper[4634]: I0929 13:44:38.993682 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:38 crc kubenswrapper[4634]: I0929 13:44:38.993695 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.122386 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.129570 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.192958 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.193192 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.193909 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.193950 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.193972 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.416256 4634 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.416383 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.739957 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.740641 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.741562 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 
Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.741617 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.784911 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.785139 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.786281 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.786327 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:39 crc kubenswrapper[4634]: I0929 13:44:39.786383 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:40 crc kubenswrapper[4634]: E0929 13:44:40.170646 4634 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.195816 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.197006 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.197060 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.197114 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.259844 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.260167 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.264826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.264870 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:40 crc kubenswrapper[4634]: I0929 13:44:40.264883 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:41 crc kubenswrapper[4634]: I0929 13:44:41.868159 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Sep 29 13:44:41 crc kubenswrapper[4634]: I0929 13:44:41.868343 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:41 crc kubenswrapper[4634]: I0929 13:44:41.869568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:41 crc kubenswrapper[4634]: I0929 13:44:41.869622 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:41 crc kubenswrapper[4634]: I0929 13:44:41.869640 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:43 crc kubenswrapper[4634]: I0929 13:44:43.002384 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Sep 29 13:44:43 crc kubenswrapper[4634]: I0929 13:44:43.002675 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 13:44:43 crc kubenswrapper[4634]: I0929 13:44:43.004152 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:44:43 crc kubenswrapper[4634]: I0929 13:44:43.004213 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:44:43 crc kubenswrapper[4634]: I0929 13:44:43.004236 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:44:43 crc kubenswrapper[4634]: W0929 13:44:43.722543 4634 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Sep 29 13:44:43 crc kubenswrapper[4634]: I0929 13:44:43.722622 4634 trace.go:236] Trace[408716385]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 13:44:33.721) (total time: 10000ms):
Sep 29 13:44:43 crc kubenswrapper[4634]: Trace[408716385]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (13:44:43.722)
Sep 29 13:44:43 crc kubenswrapper[4634]: Trace[408716385]: [10.000904208s] [10.000904208s] END
Sep 29 13:44:43 crc kubenswrapper[4634]: E0929 13:44:43.722644 4634 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.038484 4634 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.208398 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.209999 4634 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="efd5057175ec498248eecfc4a29be83c79023856866821ddcc51d79b3ef61156" exitCode=255
Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.210028 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"efd5057175ec498248eecfc4a29be83c79023856866821ddcc51d79b3ef61156"}
Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.210176 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
annotation to enable volume controller attach/detach" Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.210943 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.210972 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.210985 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.211661 4634 scope.go:117] "RemoveContainer" containerID="efd5057175ec498248eecfc4a29be83c79023856866821ddcc51d79b3ef61156" Sep 29 13:44:44 crc kubenswrapper[4634]: E0929 13:44:44.253562 4634 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.1869c4c2b6a3637f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 13:44:30.031750015 +0000 UTC m=+0.600477794,LastTimestamp:2025-09-29 13:44:30.031750015 +0000 UTC m=+0.600477794,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.291063 4634 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.291396 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.301530 4634 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 29 13:44:44 crc kubenswrapper[4634]: I0929 13:44:44.301712 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 29 13:44:45 crc kubenswrapper[4634]: I0929 13:44:45.214901 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 13:44:45 crc kubenswrapper[4634]: I0929 13:44:45.217236 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c"} Sep 29 13:44:45 crc kubenswrapper[4634]: I0929 13:44:45.217379 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:45 crc kubenswrapper[4634]: I0929 13:44:45.218142 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:45 crc kubenswrapper[4634]: I0929 13:44:45.218195 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:45 crc kubenswrapper[4634]: I0929 13:44:45.218219 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:46 crc kubenswrapper[4634]: I0929 13:44:46.133270 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:46 crc kubenswrapper[4634]: I0929 13:44:46.219557 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:46 crc kubenswrapper[4634]: I0929 13:44:46.219759 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:46 crc kubenswrapper[4634]: I0929 13:44:46.220669 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:46 crc kubenswrapper[4634]: I0929 13:44:46.220705 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:46 crc kubenswrapper[4634]: I0929 13:44:46.220716 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:46 crc kubenswrapper[4634]: I0929 13:44:46.229056 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:47 crc kubenswrapper[4634]: I0929 13:44:47.222195 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:47 crc kubenswrapper[4634]: I0929 13:44:47.223485 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:47 crc kubenswrapper[4634]: I0929 13:44:47.223531 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:47 crc kubenswrapper[4634]: I0929 13:44:47.223551 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:48 crc kubenswrapper[4634]: I0929 13:44:48.224262 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:48 crc kubenswrapper[4634]: I0929 13:44:48.225612 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:48 crc kubenswrapper[4634]: I0929 13:44:48.225641 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:48 crc kubenswrapper[4634]: I0929 13:44:48.225649 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:49 crc kubenswrapper[4634]: E0929 13:44:49.291187 4634 controller.go:145] "Failed to ensure lease exists, will 
retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.292715 4634 trace.go:236] Trace[867331807]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 13:44:37.781) (total time: 11511ms): Sep 29 13:44:49 crc kubenswrapper[4634]: Trace[867331807]: ---"Objects listed" error: 11511ms (13:44:49.292) Sep 29 13:44:49 crc kubenswrapper[4634]: Trace[867331807]: [11.511373415s] [11.511373415s] END Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.292744 4634 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.295312 4634 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.295469 4634 trace.go:236] Trace[796453853]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 13:44:37.357) (total time: 11938ms): Sep 29 13:44:49 crc kubenswrapper[4634]: Trace[796453853]: ---"Objects listed" error: 11938ms (13:44:49.295) Sep 29 13:44:49 crc kubenswrapper[4634]: Trace[796453853]: [11.938150665s] [11.938150665s] END Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.295529 4634 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Sep 29 13:44:49 crc kubenswrapper[4634]: E0929 13:44:49.297787 4634 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.298504 4634 trace.go:236] Trace[1841715352]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 13:44:39.082) (total time: 10216ms): Sep 29 13:44:49 crc kubenswrapper[4634]: Trace[1841715352]: ---"Objects listed" error: 10216ms (13:44:49.298) Sep 29 13:44:49 crc kubenswrapper[4634]: Trace[1841715352]: [10.216293016s] [10.216293016s] END Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.298546 4634 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.379820 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:49 crc kubenswrapper[4634]: I0929 13:44:49.385867 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.029874 4634 apiserver.go:52] "Watching apiserver" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.033923 4634 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.034204 4634 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"] Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.034577 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.034597 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.034632 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.034764 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.034826 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.034842 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.034930 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.034980 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.035327 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.036747 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.036839 4634 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.038531 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.038588 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.038530 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.038664 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.038838 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.039208 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.044016 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.044022 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.062431 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.072572 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.082162 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.091196 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.099587 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100703 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100844 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100877 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100901 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100921 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100941 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100961 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" 
(UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.100984 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101009 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101028 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101032 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101050 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101070 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.101132 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:44:50.60111524 +0000 UTC m=+21.169842989 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101151 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101169 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101173 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101184 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101198 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101213 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101233 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101248 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101264 4634 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101323 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101341 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101355 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101369 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101384 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101416 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101431 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101446 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101462 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 
13:44:50.101476 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101490 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101504 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101519 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101554 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101568 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101584 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101600 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101614 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101614 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101792 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101808 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101824 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101645 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.101993 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102003 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102018 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102041 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102044 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102057 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102072 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102063 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102105 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102150 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102159 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102181 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102201 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102218 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102233 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102249 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102254 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102266 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102286 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102307 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102322 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102339 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102358 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102373 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102389 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102404 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102426 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102442 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102457 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102473 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102489 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102507 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102526 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102544 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102561 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102577 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102592 4634 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102606 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102622 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102637 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102654 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102670 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102687 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102703 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102720 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102738 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102788 4634 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102833 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102850 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102866 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102881 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102898 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102914 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102929 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102947 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102962 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102976 4634 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102992 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103007 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103024 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103039 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103054 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103069 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103101 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103120 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103136 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103152 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103183 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103201 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103219 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103235 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103250 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103265 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103281 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103297 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103312 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103331 4634 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103347 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103364 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103381 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103399 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103415 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103433 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103450 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103466 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103483 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103499 4634 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103516 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103531 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103549 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103568 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103584 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103599 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103614 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103629 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103644 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103661 
4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103677 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103697 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103714 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103729 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103750 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103766 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103783 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103800 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103817 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: 
\"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103839 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103855 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103871 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103889 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103906 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103923 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103940 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103959 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103979 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103999 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104018 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104035 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104052 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104067 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104097 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104114 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104131 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104147 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104163 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104180 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104197 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104213 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104228 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104245 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104263 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104280 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104297 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104316 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104333 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104350 4634 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104367 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104383 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104401 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104417 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104434 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104452 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104470 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104486 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104503 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: 
\"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104521 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104539 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104555 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104571 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104589 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104606 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104623 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104640 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104656 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104673 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104691 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104707 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104723 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104740 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104757 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104774 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104810 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104834 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104853 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 
crc kubenswrapper[4634]: I0929 13:44:50.104871 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104890 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.106818 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.106863 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.106894 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.106926 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.106955 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.106977 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107001 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: 
\"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107023 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107047 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107141 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107162 4634 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107174 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107184 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107199 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107210 4634 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107222 4634 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107232 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107248 4634 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 
13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107259 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102264 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102400 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102499 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102519 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102553 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102634 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102678 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102707 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102762 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.102875 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103096 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103189 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103204 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103309 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103307 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108896 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103432 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103481 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103497 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103530 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103545 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103658 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103670 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103738 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103789 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103818 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103837 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103950 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.103969 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104124 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104135 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104272 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104296 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.104471 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107288 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107294 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107456 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107574 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107598 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107759 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107802 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107849 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.107998 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108228 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108342 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108349 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108518 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108547 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108577 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108680 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108842 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.108984 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109003 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109213 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109262 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109404 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109448 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109586 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109612 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109645 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109753 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109791 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109873 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.109917 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.110073 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.110268 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.110535 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.111094 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.112471 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.112573 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.112755 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.113017 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.113707 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.112612 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.115041 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.115706 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.115840 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.116880 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.117168 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.117355 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.117509 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.117647 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.117781 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.117912 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.118044 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.118418 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.118756 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.119356 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.120491 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.121493 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.121762 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.121841 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.121965 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.122279 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.123477 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.123709 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.123976 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124062 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124066 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124079 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124337 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.122538 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124391 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124424 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124499 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.124701 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.125269 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.125375 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:50.625357834 +0000 UTC m=+21.194085583 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.125571 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.126041 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.126875 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127020 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127057 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127065 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127219 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127585 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127606 4634 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127856 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.127863 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.128038 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.128292 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.128424 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.128469 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.128560 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:50.628543001 +0000 UTC m=+21.197270750 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.129066 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.129185 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130029 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.130219 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130648 4634 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.130655 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130697 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.130734 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130752 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.130795 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:50.630781273 +0000 UTC m=+21.199509022 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130832 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130534 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130556 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.130963 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.131150 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.131523 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.131693 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.132173 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.132299 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.132521 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.135886 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.136651 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.136828 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.137075 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.137626 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.139994 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.140016 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.140033 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.140043 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.140076 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:50.640064357 +0000 UTC m=+21.208792106 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.131986 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.140429 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.140473 4634 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.140728 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.140904 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.141051 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.141183 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.141251 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.141347 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.141698 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.141921 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.142154 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.142335 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.142655 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.142839 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.143044 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.143407 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.143477 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.143665 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.143868 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.143878 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.144055 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.144346 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.144349 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.144549 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.144703 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.145251 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.146050 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.146102 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.146366 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.147232 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.147588 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148096 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148111 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148154 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148263 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148310 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148734 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148905 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.148931 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.149378 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.149398 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.149444 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.149669 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.149896 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.150390 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.150371 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.150633 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.150854 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.153552 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.154175 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.158124 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.158703 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.158955 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.159321 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.160536 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.160576 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.161537 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.163001 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.163939 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.167154 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.167279 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.170012 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.170874 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.172150 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.172868 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.175852 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.176509 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.177290 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.178880 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.180022 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.180271 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.181582 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.181577 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.182278 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.182847 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.184123 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.184870 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.185242 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.186134 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.186757 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.187825 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.188624 4634 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.188793 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.189031 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.191231 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.191810 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.192288 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.193800 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.194846 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.195458 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.196476 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.197069 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.197348 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.198384 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.199164 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.200176 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.201209 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.201943 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.202555 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.203516 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.204277 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" 
path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.205222 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.205761 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.206705 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.206908 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.207476 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.209279 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.211608 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.214800 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.225110 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.229233 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.229798 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.230863 4634 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c" exitCode=255 Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.230920 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c"} Sep 29 13:44:50 
crc kubenswrapper[4634]: I0929 13:44:50.230965 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231012 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231041 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231223 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231255 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231268 4634 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231276 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231286 4634 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231295 4634 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231305 4634 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231314 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231324 4634 
reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231333 4634 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231343 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231353 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231362 4634 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231372 4634 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231382 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231391 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231401 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231413 4634 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231425 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231439 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231450 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231460 4634 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231488 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231507 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231517 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231526 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231534 4634 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231543 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231552 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231560 4634 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231569 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231577 4634 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231586 4634 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231594 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231602 4634 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231610 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231619 4634 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231628 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231635 4634 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231643 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231651 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231660 4634 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231668 4634 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231687 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231696 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231705 4634 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231714 4634 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231723 4634 reconciler_common.go:293] "Volume 
detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231732 4634 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231743 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231752 4634 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231761 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231769 4634 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231777 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231786 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231795 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231803 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231812 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231821 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231829 4634 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 
29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.231838 4634 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232238 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232252 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232260 4634 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232269 4634 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232278 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232287 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232296 4634 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232304 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232312 4634 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232321 4634 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232329 4634 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232337 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath 
\"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232356 4634 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232369 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232377 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232387 4634 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232397 4634 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232405 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232413 4634 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232421 4634 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232429 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232437 4634 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232450 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232458 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232466 4634 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc 
kubenswrapper[4634]: I0929 13:44:50.232474 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232483 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232490 4634 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232498 4634 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232507 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232514 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232522 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232530 4634 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232538 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232545 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232553 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232562 4634 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232570 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc 
kubenswrapper[4634]: I0929 13:44:50.232578 4634 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232586 4634 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232596 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232604 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232612 4634 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232620 4634 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232628 4634 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232636 4634 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232644 4634 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232655 4634 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232663 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232672 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232680 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc 
kubenswrapper[4634]: I0929 13:44:50.232688 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232697 4634 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232705 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232713 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232721 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232728 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232746 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232755 4634 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232763 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232771 4634 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232779 4634 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232787 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232795 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232803 
4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232811 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232819 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232827 4634 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232836 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232821 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232845 4634 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.232992 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233003 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233013 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233023 4634 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233031 4634 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233040 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233048 4634 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233056 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233064 4634 reconciler_common.go:293] "Volume detached for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233074 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233096 4634 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233105 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233115 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233123 4634 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233133 4634 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233141 4634 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233150 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233159 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233168 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233178 4634 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233186 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233194 4634 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233202 4634 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233210 4634 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233218 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233229 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233237 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233245 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233253 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233261 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233268 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233276 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233283 4634 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233291 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233298 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 
13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233306 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233314 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233322 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233331 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233340 4634 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233348 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233356 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233363 4634 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233385 4634 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233395 4634 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233420 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233428 4634 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233449 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" 
DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233459 4634 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233467 4634 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233475 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233484 4634 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233492 4634 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.233909 4634 scope.go:117] "RemoveContainer" containerID="efd5057175ec498248eecfc4a29be83c79023856866821ddcc51d79b3ef61156" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.243633 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.243752 4634 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.247849 4634 scope.go:117] "RemoveContainer" containerID="0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.248375 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.248561 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.256484 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.264730 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.272244 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.281977 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.289933 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.297978 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.306864 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://efd5057175ec498248eecfc4a29be83c79023856866821ddcc51d79b3ef61156\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:44Z\\\",\\\"message\\\":\\\"W0929 13:44:33.205640 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
13:44:33.207154 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759153473 cert, and key in /tmp/serving-cert-2249451748/serving-signer.crt, /tmp/serving-cert-2249451748/serving-signer.key\\\\nI0929 13:44:33.450523 1 observer_polling.go:159] Starting file observer\\\\nW0929 13:44:33.453465 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 13:44:33.453759 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:33.454955 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2249451748/tls.crt::/tmp/serving-cert-2249451748/tls.key\\\\\\\"\\\\nF0929 13:44:44.047354 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.316397 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.347687 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.357239 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.361688 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 13:44:50 crc kubenswrapper[4634]: W0929 13:44:50.366569 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-733d9bad708a5c5dc43a0f5feb59b152e52fb894b0cfec56f264246cd455ef71 WatchSource:0}: Error finding container 733d9bad708a5c5dc43a0f5feb59b152e52fb894b0cfec56f264246cd455ef71: Status 404 returned error can't find the container with id 733d9bad708a5c5dc43a0f5feb59b152e52fb894b0cfec56f264246cd455ef71 Sep 29 13:44:50 crc kubenswrapper[4634]: W0929 13:44:50.374137 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-f24f10985e045dc3316ea887a02b4569e199fa95b561a489b22fc5f2cb849759 WatchSource:0}: Error finding container f24f10985e045dc3316ea887a02b4569e199fa95b561a489b22fc5f2cb849759: Status 404 returned error can't find the container with id f24f10985e045dc3316ea887a02b4569e199fa95b561a489b22fc5f2cb849759 Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.638376 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.638666 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:44:51.638627575 +0000 UTC m=+22.207355364 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.638788 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.638844 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.638914 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.638931 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.639008 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:51.638985195 +0000 UTC m=+22.207713064 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.639165 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.639173 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.639221 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.639241 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.639281 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:51.639256993 +0000 UTC m=+22.207984782 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.639333 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:51.639295224 +0000 UTC m=+22.208023013 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.740471 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.740698 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.740746 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.740761 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: E0929 13:44:50.740831 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:51.740814949 +0000 UTC m=+22.309542698 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:50 crc kubenswrapper[4634]: I0929 13:44:50.787524 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.236225 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.239286 4634 scope.go:117] "RemoveContainer" containerID="0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c" Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.239411 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.242267 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e"} Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.242293 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145"} Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.242302 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f24f10985e045dc3316ea887a02b4569e199fa95b561a489b22fc5f2cb849759"} Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.244351 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"733d9bad708a5c5dc43a0f5feb59b152e52fb894b0cfec56f264246cd455ef71"} Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.251377 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50"} Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.251423 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"22a6bc5d63c92c49bddfa17f4799c2df37aa3c257bcf11e4a4843f814438be9d"} Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.275244 
4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.300874 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.315203 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.328657 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.346861 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.358425 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.376419 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.390207 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.407016 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.432279 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.446304 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.458109 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.471701 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.485002 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.495466 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.508325 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.649014 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.649072 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.649121 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.649139 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649232 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649245 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649254 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649312 4634 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:44:53.64925019 +0000 UTC m=+24.217977959 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649344 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649323 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649371 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:53.649359263 +0000 UTC m=+24.218087012 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649459 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:53.649453085 +0000 UTC m=+24.218180834 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.649473 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:53.649466966 +0000 UTC m=+24.218194715 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: I0929 13:44:51.750552 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.750720 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.750742 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.750756 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:51 crc kubenswrapper[4634]: E0929 13:44:51.750822 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:53.750802245 +0000 UTC m=+24.319530014 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:52 crc kubenswrapper[4634]: I0929 13:44:52.109855 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:52 crc kubenswrapper[4634]: I0929 13:44:52.109902 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:52 crc kubenswrapper[4634]: E0929 13:44:52.109980 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:44:52 crc kubenswrapper[4634]: E0929 13:44:52.110120 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:44:52 crc kubenswrapper[4634]: I0929 13:44:52.110177 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:52 crc kubenswrapper[4634]: E0929 13:44:52.110233 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:44:52 crc kubenswrapper[4634]: I0929 13:44:52.113595 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 13:44:52 crc kubenswrapper[4634]: I0929 13:44:52.114580 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 13:44:52 crc kubenswrapper[4634]: I0929 13:44:52.115322 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 13:44:52 crc kubenswrapper[4634]: I0929 13:44:52.253954 4634 scope.go:117] "RemoveContainer" containerID="0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c" Sep 29 13:44:52 crc kubenswrapper[4634]: E0929 13:44:52.254170 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.032808 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.048136 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.052322 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.052774 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.068060 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.087832 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.103827 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.121700 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.136307 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.152073 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.169577 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.182454 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.195624 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.207967 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.225766 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.239053 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.251157 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.256720 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416"} Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.268989 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.291607 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.306363 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.330959 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.362446 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.420569 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.437097 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.457659 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.498141 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.526761 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.553885 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.579393 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:53Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.671771 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.671946 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:44:57.671907688 +0000 UTC m=+28.240635437 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.672148 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.672233 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672314 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672347 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672363 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672407 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:57.672395601 +0000 UTC m=+28.241123460 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672326 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672452 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:57.672443762 +0000 UTC m=+28.241171631 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672499 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.672326 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.672605 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:57.672576446 +0000 UTC m=+28.241304285 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: I0929 13:44:53.773238 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.773363 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.773378 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.773388 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:44:53 crc kubenswrapper[4634]: E0929 13:44:53.773429 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:44:57.773417332 +0000 UTC m=+28.342145081 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.109548 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.109557 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.109611 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:44:54 crc kubenswrapper[4634]: E0929 13:44:54.110282 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 13:44:54 crc kubenswrapper[4634]: E0929 13:44:54.110357 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 13:44:54 crc kubenswrapper[4634]: E0929 13:44:54.110437 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.310738 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-wtnjd"]
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.311203 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wtnjd"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.311372 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-k9jf4"]
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.311632 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-sxkt4"]
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.311835 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-sxkt4"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.312136 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.322686 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.322991 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.323191 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.323020 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.323065 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.323936 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.324178 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.325316 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.325324 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.325459 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.325470 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.325596 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.328149 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.341818 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.353779 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.368595 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377435 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-cni-bin\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377473 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-hostroot\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377505 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n72r\" (UniqueName: \"kubernetes.io/projected/9173d45a-da12-4090-92c3-65ad4dcec715-kube-api-access-6n72r\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4"
\"kubernetes.io/projected/9173d45a-da12-4090-92c3-65ad4dcec715-kube-api-access-6n72r\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377529 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/3c08520a-99c4-43f8-8cd3-6547f22c571b-hosts-file\") pod \"node-resolver-sxkt4\" (UID: \"3c08520a-99c4-43f8-8cd3-6547f22c571b\") " pod="openshift-dns/node-resolver-sxkt4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377615 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-multus-certs\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377672 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9173d45a-da12-4090-92c3-65ad4dcec715-proxy-tls\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377699 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkwzm\" (UniqueName: \"kubernetes.io/projected/3c08520a-99c4-43f8-8cd3-6547f22c571b-kube-api-access-lkwzm\") pod \"node-resolver-sxkt4\" (UID: \"3c08520a-99c4-43f8-8cd3-6547f22c571b\") " pod="openshift-dns/node-resolver-sxkt4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377717 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-daemon-config\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377737 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9173d45a-da12-4090-92c3-65ad4dcec715-rootfs\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377752 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-etc-kubernetes\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377774 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9173d45a-da12-4090-92c3-65ad4dcec715-mcd-auth-proxy-config\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377788 4634 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-kubelet\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377806 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-cni-dir\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377822 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-socket-dir-parent\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377846 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-system-cni-dir\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377867 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-os-release\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377888 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-conf-dir\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377903 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svzgk\" (UniqueName: \"kubernetes.io/projected/77b5113e-50cd-417c-8991-cae5cd823f3f-kube-api-access-svzgk\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377920 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-netns\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377940 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-cni-multus\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.377967 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-cnibin\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.378010 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/77b5113e-50cd-417c-8991-cae5cd823f3f-cni-binary-copy\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.378034 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-k8s-cni-cncf-io\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.381992 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.395319 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.425711 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.436178 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z"
Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.446331 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.470430 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479297 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-multus-certs\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479337 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9173d45a-da12-4090-92c3-65ad4dcec715-proxy-tls\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479354 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkwzm\" (UniqueName: 
\"kubernetes.io/projected/3c08520a-99c4-43f8-8cd3-6547f22c571b-kube-api-access-lkwzm\") pod \"node-resolver-sxkt4\" (UID: \"3c08520a-99c4-43f8-8cd3-6547f22c571b\") " pod="openshift-dns/node-resolver-sxkt4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479371 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-daemon-config\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479387 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9173d45a-da12-4090-92c3-65ad4dcec715-rootfs\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479404 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-etc-kubernetes\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479424 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9173d45a-da12-4090-92c3-65ad4dcec715-mcd-auth-proxy-config\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479427 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-multus-certs\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479442 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-kubelet\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479459 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-cni-dir\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479474 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-socket-dir-parent\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479497 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-system-cni-dir\") pod \"multus-wtnjd\" (UID: 
\"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479511 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-os-release\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479525 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-conf-dir\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479539 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svzgk\" (UniqueName: \"kubernetes.io/projected/77b5113e-50cd-417c-8991-cae5cd823f3f-kube-api-access-svzgk\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479555 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-netns\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479570 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-cni-multus\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479592 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-cnibin\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479607 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/77b5113e-50cd-417c-8991-cae5cd823f3f-cni-binary-copy\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479621 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-k8s-cni-cncf-io\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479635 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-cni-bin\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479651 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"hostroot\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-hostroot\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479671 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n72r\" (UniqueName: \"kubernetes.io/projected/9173d45a-da12-4090-92c3-65ad4dcec715-kube-api-access-6n72r\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479685 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/3c08520a-99c4-43f8-8cd3-6547f22c571b-hosts-file\") pod \"node-resolver-sxkt4\" (UID: \"3c08520a-99c4-43f8-8cd3-6547f22c571b\") " pod="openshift-dns/node-resolver-sxkt4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479722 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-conf-dir\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.479748 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/3c08520a-99c4-43f8-8cd3-6547f22c571b-hosts-file\") pod \"node-resolver-sxkt4\" (UID: \"3c08520a-99c4-43f8-8cd3-6547f22c571b\") " pod="openshift-dns/node-resolver-sxkt4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480024 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-netns\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480051 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-cni-multus\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480100 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-cnibin\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480124 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-hostroot\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480155 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-cni-bin\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 
13:44:54.480276 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-var-lib-kubelet\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480306 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/9173d45a-da12-4090-92c3-65ad4dcec715-rootfs\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480330 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-etc-kubernetes\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480403 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-daemon-config\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480518 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-socket-dir-parent\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480537 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-host-run-k8s-cni-cncf-io\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480652 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-system-cni-dir\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480605 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-os-release\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480713 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/77b5113e-50cd-417c-8991-cae5cd823f3f-cni-binary-copy\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480606 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/77b5113e-50cd-417c-8991-cae5cd823f3f-multus-cni-dir\") pod \"multus-wtnjd\" (UID: 
\"77b5113e-50cd-417c-8991-cae5cd823f3f\") " pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.480860 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9173d45a-da12-4090-92c3-65ad4dcec715-mcd-auth-proxy-config\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.483239 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/9173d45a-da12-4090-92c3-65ad4dcec715-proxy-tls\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.483594 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.499757 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svzgk\" (UniqueName: \"kubernetes.io/projected/77b5113e-50cd-417c-8991-cae5cd823f3f-kube-api-access-svzgk\") pod \"multus-wtnjd\" (UID: \"77b5113e-50cd-417c-8991-cae5cd823f3f\") " 
pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.501724 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n72r\" (UniqueName: \"kubernetes.io/projected/9173d45a-da12-4090-92c3-65ad4dcec715-kube-api-access-6n72r\") pod \"machine-config-daemon-k9jf4\" (UID: \"9173d45a-da12-4090-92c3-65ad4dcec715\") " pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.502516 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.505624 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkwzm\" (UniqueName: \"kubernetes.io/projected/3c08520a-99c4-43f8-8cd3-6547f22c571b-kube-api-access-lkwzm\") pod \"node-resolver-sxkt4\" (UID: \"3c08520a-99c4-43f8-8cd3-6547f22c571b\") " pod="openshift-dns/node-resolver-sxkt4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.519447 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.542574 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.568496 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\
\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\
"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.586002 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.609408 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.617342 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is 
after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.624440 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wtnjd" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.631331 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":
\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.638493 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-sxkt4" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.639323 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:44:54 crc kubenswrapper[4634]: W0929 13:44:54.640041 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77b5113e_50cd_417c_8991_cae5cd823f3f.slice/crio-250100900e59f17d1c244d434868f57aa208d581352a4ede99fa70435678376c WatchSource:0}: Error finding container 250100900e59f17d1c244d434868f57aa208d581352a4ede99fa70435678376c: Status 404 returned error can't find the container with id 250100900e59f17d1c244d434868f57aa208d581352a4ede99fa70435678376c Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.646633 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: W0929 13:44:54.651300 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3c08520a_99c4_43f8_8cd3_6547f22c571b.slice/crio-ee4e59f6938ddd83b865fe6830c0d68dc6284c083f7f37cad8ed6e4884b86a80 WatchSource:0}: Error finding container ee4e59f6938ddd83b865fe6830c0d68dc6284c083f7f37cad8ed6e4884b86a80: Status 404 returned error can't find the container with id ee4e59f6938ddd83b865fe6830c0d68dc6284c083f7f37cad8ed6e4884b86a80 Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.661354 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.672624 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-b59nb"] Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.673250 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.674757 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.678509 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jqlh6"] Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.681213 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.682006 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-system-cni-dir\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.682039 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.682126 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64gtc\" (UniqueName: \"kubernetes.io/projected/fd1b6a92-008b-40ed-bbbb-15270d2f599a-kube-api-access-64gtc\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.682162 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.682189 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-os-release\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.682257 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cnibin\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.682277 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cni-binary-copy\") pod \"multus-additional-cni-plugins-b59nb\" (UID: 
\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.690835 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.693748 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.693883 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.693983 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.693992 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.694060 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.694187 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.694141 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.697225 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\
\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cr
i-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.713497 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.723851 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.738521 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.753324 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.766624 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.779552 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782775 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cni-binary-copy\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782815 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-script-lib\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782845 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-ovn\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782863 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-env-overrides\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782880 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-systemd-units\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782895 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-netns\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782910 4634 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.782951 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-node-log\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783052 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-netd\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783131 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k24j2\" (UniqueName: \"kubernetes.io/projected/65f06677-4cbf-41c9-a0da-02f49710c11c-kube-api-access-k24j2\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783240 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-system-cni-dir\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783280 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-systemd\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783331 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783348 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-bin\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783362 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-kubelet\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783386 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cni-binary-copy\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783407 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64gtc\" (UniqueName: \"kubernetes.io/projected/fd1b6a92-008b-40ed-bbbb-15270d2f599a-kube-api-access-64gtc\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783425 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65f06677-4cbf-41c9-a0da-02f49710c11c-ovn-node-metrics-cert\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783491 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-var-lib-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783519 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783565 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-log-socket\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783588 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-ovn-kubernetes\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783605 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-os-release\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783620 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-system-cni-dir\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783641 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-slash\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783672 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783693 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-config\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783712 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cnibin\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783745 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-etc-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783742 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cnibin\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.783817 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.784006 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fd1b6a92-008b-40ed-bbbb-15270d2f599a-os-release\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.784098 4634 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/fd1b6a92-008b-40ed-bbbb-15270d2f599a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.798148 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.802256 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64gtc\" (UniqueName: \"kubernetes.io/projected/fd1b6a92-008b-40ed-bbbb-15270d2f599a-kube-api-access-64gtc\") pod \"multus-additional-cni-plugins-b59nb\" (UID: \"fd1b6a92-008b-40ed-bbbb-15270d2f599a\") " pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.813104 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3
f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.826851 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.839940 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.853415 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.863994 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.882459 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884598 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-slash\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884632 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-log-socket\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884646 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-ovn-kubernetes\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884665 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884679 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-config\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884695 
4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-etc-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884710 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-script-lib\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884735 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-ovn\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884747 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-env-overrides\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884764 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-node-log\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884785 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-systemd-units\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884802 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-netns\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884815 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884828 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-netd\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884844 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k24j2\" 
(UniqueName: \"kubernetes.io/projected/65f06677-4cbf-41c9-a0da-02f49710c11c-kube-api-access-k24j2\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884875 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-systemd\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884894 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-bin\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884913 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-kubelet\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884931 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-var-lib-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.884950 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65f06677-4cbf-41c9-a0da-02f49710c11c-ovn-node-metrics-cert\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885157 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885174 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-netd\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885205 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-slash\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885223 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-netns\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885233 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-systemd-units\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885234 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-etc-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885277 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-node-log\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885344 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885362 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-bin\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885370 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-kubelet\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885366 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-systemd\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885381 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-var-lib-openvswitch\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885397 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-log-socket\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885420 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-ovn-kubernetes\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.885449 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-ovn\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.886006 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-env-overrides\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.886119 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-script-lib\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.886253 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-config\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.889051 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65f06677-4cbf-41c9-a0da-02f49710c11c-ovn-node-metrics-cert\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.900226 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.901033 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k24j2\" (UniqueName: \"kubernetes.io/projected/65f06677-4cbf-41c9-a0da-02f49710c11c-kube-api-access-k24j2\") pod \"ovnkube-node-jqlh6\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.913812 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.922878 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is 
after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.935764 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.947595 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.958826 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.977967 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:54Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:54 crc kubenswrapper[4634]: I0929 13:44:54.999468 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-b59nb" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.006482 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-
cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: W0929 13:44:55.009511 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd1b6a92_008b_40ed_bbbb_15270d2f599a.slice/crio-3efc595f751578b38927ec8e9af00865396428c0d2c0dfa40c39b1942f1940aa WatchSource:0}: Error finding container 3efc595f751578b38927ec8e9af00865396428c0d2c0dfa40c39b1942f1940aa: Status 404 returned error can't find the container with id 3efc595f751578b38927ec8e9af00865396428c0d2c0dfa40c39b1942f1940aa Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.011659 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.032906 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: W0929 13:44:55.035781 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65f06677_4cbf_41c9_a0da_02f49710c11c.slice/crio-5d52462b034d716eab641671b1df7b79977796f8dd21f17843728c22f6ff035f WatchSource:0}: Error finding container 5d52462b034d716eab641671b1df7b79977796f8dd21f17843728c22f6ff035f: Status 404 returned error can't find the container with id 5d52462b034d716eab641671b1df7b79977796f8dd21f17843728c22f6ff035f Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.056955 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.073653 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.088979 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.109607 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.262334 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055" exitCode=0 Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.262406 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.262455 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"5d52462b034d716eab641671b1df7b79977796f8dd21f17843728c22f6ff035f"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.263719 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerStarted","Data":"9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.263748 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerStarted","Data":"250100900e59f17d1c244d434868f57aa208d581352a4ede99fa70435678376c"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.265799 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerStarted","Data":"54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.265830 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerStarted","Data":"3efc595f751578b38927ec8e9af00865396428c0d2c0dfa40c39b1942f1940aa"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.267483 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.267508 4634 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.267518 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"7a9b32fb7033f30a91fa112446117bcb738ceb7636002ada53f756b553bec967"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.268887 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-sxkt4" event={"ID":"3c08520a-99c4-43f8-8cd3-6547f22c571b","Type":"ContainerStarted","Data":"d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.268911 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-sxkt4" event={"ID":"3c08520a-99c4-43f8-8cd3-6547f22c571b","Type":"ContainerStarted","Data":"ee4e59f6938ddd83b865fe6830c0d68dc6284c083f7f37cad8ed6e4884b86a80"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.281016 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.296409 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.307607 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.319383 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.329805 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.345772 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.365021 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68774
41ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.376966 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.391071 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.404452 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.423442 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.440284 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.464562 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.478480 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.496612 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.517515 4634 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\
"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067
4616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.538946 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b900922
72e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.560377 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.582691 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.625029 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.664436 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.698852 4634 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.700522 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.700575 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.700586 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.700703 4634 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.701585 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.754460 4634 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.754917 4634 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.757557 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.757589 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.757598 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.757612 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.757620 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:55Z","lastTransitionTime":"2025-09-29T13:44:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:55 crc kubenswrapper[4634]: E0929 13:44:55.774841 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.778645 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.778676 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.778685 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.778698 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.778708 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:55Z","lastTransitionTime":"2025-09-29T13:44:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.783725 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: E0929 13:44:55.789711 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.792777 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.792806 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.792815 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.792828 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.792837 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:55Z","lastTransitionTime":"2025-09-29T13:44:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:55 crc kubenswrapper[4634]: E0929 13:44:55.803512 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.806455 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.806485 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.806494 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.806508 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.806516 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:55Z","lastTransitionTime":"2025-09-29T13:44:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:55 crc kubenswrapper[4634]: E0929 13:44:55.818546 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.821518 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.821555 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.821567 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.821583 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.821593 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:55Z","lastTransitionTime":"2025-09-29T13:44:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.833044 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: E0929 13:44:55.834425 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: E0929 13:44:55.834539 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.835850 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.835876 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.835884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.835898 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.835907 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:55Z","lastTransitionTime":"2025-09-29T13:44:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.860309 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.903235 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.938829 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.938865 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.938875 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.938894 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.938903 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:55Z","lastTransitionTime":"2025-09-29T13:44:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.943928 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:55 crc kubenswrapper[4634]: I0929 13:44:55.987516 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:55Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.041673 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.041713 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.041722 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.041737 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.041746 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.111689 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:56 crc kubenswrapper[4634]: E0929 13:44:56.111802 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.112166 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:56 crc kubenswrapper[4634]: E0929 13:44:56.112223 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.112270 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:56 crc kubenswrapper[4634]: E0929 13:44:56.112307 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.144227 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.144509 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.144519 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.144531 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.144540 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.246935 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.246968 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.246977 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.246990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.246999 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.273328 4634 generic.go:334] "Generic (PLEG): container finished" podID="fd1b6a92-008b-40ed-bbbb-15270d2f599a" containerID="54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463" exitCode=0 Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.273385 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerDied","Data":"54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.281843 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.281889 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.281903 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.281915 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.281925 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.281934 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.290369 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.303071 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.314780 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.326924 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.343330 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.353931 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.356361 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.356387 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.356399 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.356416 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.356428 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.370605 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.382048 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.398292 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.415231 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.425637 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.458760 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.458787 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.458796 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.458811 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.458819 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.464815 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.500043 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.543417 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:56Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.561105 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.561130 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.561138 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.561151 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.561159 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.663987 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.664045 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.664071 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.664120 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.664137 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.766120 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.766155 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.766164 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.766179 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.766189 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.869504 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.869820 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.869828 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.869843 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.869852 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.971956 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.971991 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.972002 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.972018 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:56 crc kubenswrapper[4634]: I0929 13:44:56.972026 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:56Z","lastTransitionTime":"2025-09-29T13:44:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.074326 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.074375 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.074386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.074698 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.074727 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.176796 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.177671 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.177787 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.177931 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.178038 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.280146 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.280196 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.280209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.280230 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.280242 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.286628 4634 generic.go:334] "Generic (PLEG): container finished" podID="fd1b6a92-008b-40ed-bbbb-15270d2f599a" containerID="bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18" exitCode=0 Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.286668 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerDied","Data":"bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.299906 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.314987 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-qvsct"] Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.315337 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.317058 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.317111 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.317127 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.318357 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.318856 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.338439 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.353477 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.364993 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.378229 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.381869 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.381896 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.381904 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.381916 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.381924 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.387945 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.407515 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\
\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mo
untPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.410583 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c79a239-2e61-4c28-8d03-1f8cebce6190-host\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.410675 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0c79a239-2e61-4c28-8d03-1f8cebce6190-serviceca\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.410716 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cnjr\" (UniqueName: \"kubernetes.io/projected/0c79a239-2e61-4c28-8d03-1f8cebce6190-kube-api-access-9cnjr\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.420606 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.430251 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.440805 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.449494 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.462999 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.483773 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.483810 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.483820 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.483837 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.483848 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.484955 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.493708 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.506381 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir
\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.511365 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/0c79a239-2e61-4c28-8d03-1f8cebce6190-serviceca\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.511404 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cnjr\" (UniqueName: \"kubernetes.io/projected/0c79a239-2e61-4c28-8d03-1f8cebce6190-kube-api-access-9cnjr\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.511429 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c79a239-2e61-4c28-8d03-1f8cebce6190-host\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.511473 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0c79a239-2e61-4c28-8d03-1f8cebce6190-host\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.512251 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/0c79a239-2e61-4c28-8d03-1f8cebce6190-serviceca\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.519047 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.530547 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cnjr\" (UniqueName: \"kubernetes.io/projected/0c79a239-2e61-4c28-8d03-1f8cebce6190-kube-api-access-9cnjr\") pod \"node-ca-qvsct\" (UID: \"0c79a239-2e61-4c28-8d03-1f8cebce6190\") " pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.536765 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.545264 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.558774 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.577919 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\
"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04
bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.586369 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.586416 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.586425 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.586438 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.586447 4634 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.589010 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.599254 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.611154 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.638480 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-qvsct" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.642507 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: W0929 13:44:57.651394 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c79a239_2e61_4c28_8d03_1f8cebce6190.slice/crio-1c53a06128fe06f713acf3d049683e176f3efe14054fd33baec16f69e610ae1b WatchSource:0}: Error finding container 1c53a06128fe06f713acf3d049683e176f3efe14054fd33baec16f69e610ae1b: Status 404 returned error can't find the container with id 1c53a06128fe06f713acf3d049683e176f3efe14054fd33baec16f69e610ae1b Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.684770 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"im
age\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.696242 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.696278 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.696287 4634 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.696302 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.696312 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.714507 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.714707 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:45:05.714680625 +0000 UTC m=+36.283408414 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.715044 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.715116 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.715170 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715225 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered 
Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715249 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715250 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715269 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:05.71525894 +0000 UTC m=+36.283986689 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715280 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715300 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715300 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:05.715283642 +0000 UTC m=+36.284011411 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.715345 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:05.715329713 +0000 UTC m=+36.284057482 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.722738 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/o
penshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.762067 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.798394 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.798427 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.798439 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.798455 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.798465 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.801954 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.815971 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.816131 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.816152 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.816163 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:57 crc kubenswrapper[4634]: E0929 13:44:57.816211 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:05.81619855 +0000 UTC m=+36.384926299 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.901497 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.901542 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.901554 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.901570 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:57 crc kubenswrapper[4634]: I0929 13:44:57.901581 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:57Z","lastTransitionTime":"2025-09-29T13:44:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.003496 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.003542 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.003554 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.003568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.003579 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.105583 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.105633 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.105649 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.105667 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.105682 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.110166 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.110220 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:44:58 crc kubenswrapper[4634]: E0929 13:44:58.110377 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.110409 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:44:58 crc kubenswrapper[4634]: E0929 13:44:58.110518 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:44:58 crc kubenswrapper[4634]: E0929 13:44:58.110653 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.208222 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.208286 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.208304 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.208332 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.208349 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.292127 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerDied","Data":"0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.292134 4634 generic.go:334] "Generic (PLEG): container finished" podID="fd1b6a92-008b-40ed-bbbb-15270d2f599a" containerID="0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba" exitCode=0 Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.294524 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qvsct" event={"ID":"0c79a239-2e61-4c28-8d03-1f8cebce6190","Type":"ContainerStarted","Data":"12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.294549 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qvsct" event={"ID":"0c79a239-2e61-4c28-8d03-1f8cebce6190","Type":"ContainerStarted","Data":"1c53a06128fe06f713acf3d049683e176f3efe14054fd33baec16f69e610ae1b"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.311940 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.311986 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.312001 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.312022 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.312037 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.316768 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.335904 4634 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6e
d9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.351520 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.370029 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.386936 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.400962 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.416218 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.416254 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.416267 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.416306 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.416317 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.417449 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.431437 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.451703 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.462351 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.474650 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.502579 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.514667 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.518118 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.518151 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.518164 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.518182 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.518265 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.532012 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.542209 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.558665 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.570476 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.588559 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.599799 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.613228 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.i
o/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.620553 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.620590 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.620601 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.620618 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.620629 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.641244 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.681810 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.722926 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.723581 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.723615 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.723628 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.723645 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.723658 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.767211 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.802501 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.825569 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.825754 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.825770 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.825787 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.825797 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.840780 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.891726 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.928422 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.928478 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.928496 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.928520 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.928538 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:58Z","lastTransitionTime":"2025-09-29T13:44:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.929598 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:58 crc kubenswrapper[4634]: I0929 13:44:58.973255 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:58Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.004546 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.031649 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.031730 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.031753 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.031788 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.031815 4634 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.135721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.135767 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.135778 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.135796 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.135807 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.237805 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.237845 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.237857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.237876 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.237887 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.302720 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.305007 4634 generic.go:334] "Generic (PLEG): container finished" podID="fd1b6a92-008b-40ed-bbbb-15270d2f599a" containerID="506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1" exitCode=0 Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.305052 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerDied","Data":"506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.326408 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.340521 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.340560 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.340569 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.340585 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.340595 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.341268 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.357734 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.373984 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.390257 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.403245 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.419268 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.430635 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.442623 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.442769 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.442814 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.442834 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.442845 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.451743 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.461424 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.481782 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.495480 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.522123 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.546645 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.546687 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.546698 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.546719 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.546731 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.560646 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.606782 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:44:59Z is after 2025-08-24T17:21:41Z" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.648918 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.648951 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.648960 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.648976 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.648985 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.752297 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.752354 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.752367 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.752393 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.752407 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.856068 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.856184 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.856204 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.856235 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.856256 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.958447 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.958500 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.958516 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.958539 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:44:59 crc kubenswrapper[4634]: I0929 13:44:59.958557 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:44:59Z","lastTransitionTime":"2025-09-29T13:44:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.061457 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.061510 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.061525 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.061544 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.061557 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.110160 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:00 crc kubenswrapper[4634]: E0929 13:45:00.110481 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.110229 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:00 crc kubenswrapper[4634]: E0929 13:45:00.110751 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.110147 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:00 crc kubenswrapper[4634]: E0929 13:45:00.111011 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.145864 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.160591 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.163787 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.163826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.163842 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.163861 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.163876 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.178731 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.190559 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.208303 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.248399 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.275906 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.275936 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.276125 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.276137 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.276155 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.276167 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.307179 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.315324 4634 generic.go:334] "Generic (PLEG): container finished" podID="fd1b6a92-008b-40ed-bbbb-15270d2f599a" 
containerID="1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0" exitCode=0
Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.315366 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerDied","Data":"1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0"}
Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.337624 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.352412 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.363596 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.377423 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.378477 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.378520 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.378532 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.378550 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.378563 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.388192 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.410104 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.419791 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.430968 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.441755 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.458574 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.468977 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.488646 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.488923 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.488931 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.488945 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.488955 4634 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.495580 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7
c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.513221 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.523828 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.535565 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.562328 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\
",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.590711 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.590745 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.590755 
4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.590770 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.590780 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.601944 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e
38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.641244 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.684023 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.692671 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.692704 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.692716 4634 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.692731 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.692741 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.723766 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.764041 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.794616 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.794659 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.794670 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.794684 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.794694 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.802431 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.897109 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.897129 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.897137 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.897148 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.897157 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.999225 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.999280 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.999292 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.999349 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:00 crc kubenswrapper[4634]: I0929 13:45:00.999362 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:00Z","lastTransitionTime":"2025-09-29T13:45:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.102168 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.102293 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.102310 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.102327 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.102390 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.205351 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.205386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.205396 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.205410 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.205418 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.308190 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.308231 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.308240 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.308254 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.308263 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.322431 4634 generic.go:334] "Generic (PLEG): container finished" podID="fd1b6a92-008b-40ed-bbbb-15270d2f599a" containerID="d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41" exitCode=0 Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.322490 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerDied","Data":"d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.330304 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.330960 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.331015 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.353882 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.368862 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.371346 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.372021 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.385195 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.396723 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.411414 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.411458 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.411474 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.411496 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.411511 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.414551 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.428723 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.442299 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.456350 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.472959 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.485521 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.495345 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.506993 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.514254 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.514284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.514294 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.514308 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.514317 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.518304 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.534327 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z 
is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.542815 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.554389 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.565345 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.576803 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.586249 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.606864 4634 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.616536 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.616572 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.616583 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc 
kubenswrapper[4634]: I0929 13:45:01.616599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.616608 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.640299 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.682391 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.719050 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.719099 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.719110 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.719123 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.719130 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.721960 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.761923 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.799736 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.820981 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.821029 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.821041 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.821063 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.821076 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.843749 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.887189 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.922665 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.923260 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.923357 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.923432 4634 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.923497 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.923561 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:01Z","lastTransitionTime":"2025-09-29T13:45:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:01 crc kubenswrapper[4634]: I0929 13:45:01.961040 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:01Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.002273 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.025958 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.025998 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.026009 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.026027 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.026040 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.109814 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.109834 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:02 crc kubenswrapper[4634]: E0929 13:45:02.109984 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:02 crc kubenswrapper[4634]: E0929 13:45:02.110055 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.109835 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:02 crc kubenswrapper[4634]: E0929 13:45:02.110180 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.127832 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.128138 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.128264 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.128369 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.128476 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.231467 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.231562 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.231586 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.231614 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.231633 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.337736 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.337782 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.337796 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.337812 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.337823 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.341009 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" event={"ID":"fd1b6a92-008b-40ed-bbbb-15270d2f599a","Type":"ContainerStarted","Data":"dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.341077 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.360574 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.378420 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.394371 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.407039 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.424464 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.439874 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.439940 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.439965 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.439996 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.440017 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.446193 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed
249140856dc3215a8766062f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.461204 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.490684 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.504400 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.516058 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.527600 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.542857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.542901 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.542912 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.542927 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.542940 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.545178 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.557461 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.570703 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.601396 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:02Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.644852 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.644883 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.644893 4634 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.644910 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.644923 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.746768 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.746793 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.746802 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.746815 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.746825 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.848759 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.848978 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.849049 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.849129 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:02 crc kubenswrapper[4634]: I0929 13:45:02.849209 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:02Z","lastTransitionTime":"2025-09-29T13:45:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.260008 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.260047 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.260058 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.260074 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.260101 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:03Z","lastTransitionTime":"2025-09-29T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.343480 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.362076 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.362118 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.362126 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.362140 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:03 crc kubenswrapper[4634]: I0929 13:45:03.362149 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:03Z","lastTransitionTime":"2025-09-29T13:45:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.079768 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.079817 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.079833 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.079854 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.079868 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.110311 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:04 crc kubenswrapper[4634]: E0929 13:45:04.110442 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.111113 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:04 crc kubenswrapper[4634]: E0929 13:45:04.111190 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.111294 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:04 crc kubenswrapper[4634]: E0929 13:45:04.111480 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
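
The node is repeatedly marked NotReady above because the container runtime reports NetworkReady=false: no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/. The gist of that readiness check can be sketched in a few lines of Go. This is an illustrative approximation only, not the actual kubelet or CRI-O code; the directory path is the one quoted in the log messages.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether dir contains at least one CNI network
// configuration file. The real check lives in the CRI runtime; this sketch
// only mirrors the observable behaviour in the log: with an empty
// /etc/kubernetes/cni/net.d/ the runtime reports NetworkReady=false.
func cniConfigPresent(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	if !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Println("NetworkReady=true")
}

Once the network provider (here OVN-Kubernetes via multus) writes its config into that directory, the same check succeeds and the kubelet clears the KubeletNotReady condition.
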
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.182712 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.182780 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.182793 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.182811 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.182822 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.285455 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.285495 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.285505 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.285523 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.285535 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.348382 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/0.log" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.351062 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f" exitCode=1 Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.351117 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f"} Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.351728 4634 scope.go:117] "RemoveContainer" containerID="5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.384368 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"s
tate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b437
94eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.388055 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.388209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.388274 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.388358 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.388416 4634 setters.go:603] "Node became not ready" node="crc" 
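
Every status patch in this stretch fails with the same x509 error: the network-node-identity webhook at https://127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is before the current time, so the kubelet's TLS client rejects it. A small diagnostic sketch that connects to such an endpoint and prints the presented certificate's validity window; the address is taken from the log, and verification is deliberately skipped so the expired chain can still be inspected rather than rejected.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Skip verification on purpose: we want to look at the expired
	// certificate, not fail on it the way the kubelet's client does.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true,
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s\n  NotBefore=%s\n  NotAfter=%s\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
		if now.After(cert.NotAfter) {
			// This is the condition behind "certificate has expired
			// or is not yet valid" in the log above.
			fmt.Println("  -> expired")
		}
	}
}
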
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.398149 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.411787 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.422074 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.438850 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433
186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.454523 4634 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439
b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.466777 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.477525 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.490062 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.491658 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.491688 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.491697 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.491710 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.491720 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.509234 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.518758 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.528146 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.540378 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.557510 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:03Z\\\",\\\"message\\\":\\\"or *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0929 13:45:03.527327 5849 factory.go:656] Stopping watch factory\\\\nI0929 13:45:03.527359 5849 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:03.527392 5849 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 13:45:03.527440 5849 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527689 5849 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527442 5849 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 13:45:03.527135 5849 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527918 5849 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527225 5849 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527066 5849 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282
a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.568924 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-a
ccess-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:04Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.593884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.593923 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.593931 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.593946 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.593990 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.699619 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.699654 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.699661 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.699675 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.699684 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.802039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.802101 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.802113 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.802151 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.802163 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.904820 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.904861 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.904872 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.904890 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:04 crc kubenswrapper[4634]: I0929 13:45:04.904901 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:04Z","lastTransitionTime":"2025-09-29T13:45:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.007010 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.007042 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.007050 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.007063 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.007072 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.109518 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.109545 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.109553 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.109567 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.109576 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.109984 4634 scope.go:117] "RemoveContainer" containerID="0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.211519 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.211801 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.211811 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.211825 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.211836 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.315940 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.316020 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.316044 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.316080 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.316156 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.355033 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.356723 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a"}
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.357050 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.358369 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/1.log"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.359006 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/0.log"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.361382 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d" exitCode=1
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.361413 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d"}
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.361450 4634 scope.go:117] "RemoveContainer" containerID="5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f"
Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.362031 4634 scope.go:117] "RemoveContainer" containerID="119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d"
Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.362170 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with
CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.375379 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/c
rcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.389360 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.401274 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.411745 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.418353 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.418387 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.418398 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.418413 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.418425 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.422582 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.441372 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed
249140856dc3215a8766062f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:03Z\\\",\\\"message\\\":\\\"or *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0929 13:45:03.527327 5849 factory.go:656] Stopping watch factory\\\\nI0929 13:45:03.527359 5849 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:03.527392 5849 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 13:45:03.527440 5849 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527689 5849 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527442 5849 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 13:45:03.527135 5849 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527918 5849 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527225 5849 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527066 5849 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282
a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.454063 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-a
ccess-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.472142 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\
\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.490535 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.506570 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.515493 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.520425 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.520571 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.520663 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.520743 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.520826 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.527894 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.539174 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.550506 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.564290 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.578893 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.590172 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.602657 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.615867 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.623417 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.623456 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.623465 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.623481 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.623494 4634 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.628947 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.640462 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.654171 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.667702 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.687430 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:03Z\\\",\\\"message\\\":\\\"or *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0929 13:45:03.527327 5849 factory.go:656] Stopping watch factory\\\\nI0929 13:45:03.527359 5849 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:03.527392 5849 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 13:45:03.527440 5849 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527689 5849 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527442 5849 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 13:45:03.527135 5849 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527918 5849 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527225 5849 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527066 5849 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.698108 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.717192 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f
511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.725726 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.725774 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.725785 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.725804 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.726262 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.729824 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.740501 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.748885 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.760398 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-co
py\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:05Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.792143 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.792377 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:45:21.792349742 +0000 UTC m=+52.361077501 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.792544 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.792691 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.792842 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.792740 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.792805 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.793226 4634 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:21.793206785 +0000 UTC m=+52.361934534 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.792953 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.793442 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:21.793427921 +0000 UTC m=+52.362155670 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.793543 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.793688 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.793776 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:21.793764941 +0000 UTC m=+52.362492760 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.828281 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.828489 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.828576 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.828675 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.828769 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.893545 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.893692 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.893708 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.893718 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:45:05 crc kubenswrapper[4634]: E0929 13:45:05.893767 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:21.893754884 +0000 UTC m=+52.462482633 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.931357 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.931400 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.931412 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.931432 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:05 crc kubenswrapper[4634]: I0929 13:45:05.931444 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:05Z","lastTransitionTime":"2025-09-29T13:45:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.033357 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.033393 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.033401 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.033416 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.033426 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.071258 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.071518 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.071753 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.071858 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.071964 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.087641 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.091114 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.091155 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.091167 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.091187 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.091200 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.103827 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.107807 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.107847 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
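Both node-status patch failures above (13:45:06.087641 and 13:45:06.103827), and every later one in this log, fail the same way: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate that expired on 2025-08-24T17:21:41Z. A minimal Go sketch for inspecting that endpoint's certificate dates from the node; it assumes only that the listener is up, and it skips verification deliberately so the handshake survives long enough to read the dates (the kubelet's verifying client instead fails outright with the x509 error recorded above):

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Dial the webhook endpoint from the log without verification,
	// purely to read the served certificate's validity window.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert := certs[0]
	fmt.Printf("subject=%v notBefore=%v notAfter=%v\n", cert.Subject, cert.NotBefore, cert.NotAfter)
	if time.Now().After(cert.NotAfter) {
		// This is the state the kubelet keeps hitting here.
		fmt.Println("serving certificate is expired; node status patches will keep failing")
	}
}

Rotating or regenerating that serving certificate is the obvious remediation to try; until then the kubelet simply retries and logs the same rejected patch.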
event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.107856 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.107870 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.107881 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.109237 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.109274 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.109323 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.109432 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.109565 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.109401 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.122614 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.126790 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.126828 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
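The two timestamps inside the webhook error bound exactly how stale the certificate is; plugging the RFC 3339 values from the log into Go's time package gives the gap directly:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Both values copied from the webhook error above.
	now, err1 := time.Parse(time.RFC3339, "2025-09-29T13:45:06Z")      // "current time"
	notAfter, err2 := time.Parse(time.RFC3339, "2025-08-24T17:21:41Z") // certificate notAfter
	if err1 != nil || err2 != nil {
		panic("timestamps in the log are valid RFC 3339, so this should not happen")
	}
	// Prints 860h23m25s: the certificate had been expired for just
	// under 36 days when these patches were rejected.
	fmt.Println("expired for:", now.Sub(notAfter))
}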
event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.126840 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.126857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.126869 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.139923 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.144208 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.144273 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
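The second, independent failure in these entries is the KubeletNotReady condition itself, which reduces to a filesystem check: nothing in /etc/kubernetes/cni/net.d/ looks like a CNI network configuration. A small Go sketch of that probe; the extension filter (.conf, .conflist, .json) is an assumption borrowed from what libcni conventionally accepts, not a guarantee of the kubelet's exact logic:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one file that
// looks like a CNI network configuration.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	if !ok {
		// This is the state the log reports: NetworkReady=false.
		fmt.Println("no CNI configuration file found; node stays NotReady")
	}
}

On this node the directory is evidently empty or absent, so every sync loop re-reports NetworkReady=false until the network provider writes a configuration there.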
event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.144284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.144306 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.144320 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.157172 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: E0929 13:45:06.157333 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.159229 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.159329 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.159396 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.159475 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.159571 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.201384 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn"] Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.201994 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.204388 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.206223 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.225767 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.238720 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.250854 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.261693 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.262794 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.262834 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.262849 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.262868 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.262881 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.277959 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/e
tc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitC
ode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.291574 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.296727 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbkjw\" (UniqueName: \"kubernetes.io/projected/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-kube-api-access-rbkjw\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.296880 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.296986 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.297076 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.305624 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.321344 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.333789 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.344950 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.357642 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.366819 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.366881 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.366900 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.366926 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.366943 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.367751 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/1.log" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.378969 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.396290 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.397663 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.397784 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbkjw\" (UniqueName: \"kubernetes.io/projected/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-kube-api-access-rbkjw\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.397830 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.397861 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.398976 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-env-overrides\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.399153 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.408500 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.412644 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.413628 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rbkjw\" (UniqueName: \"kubernetes.io/projected/2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6-kube-api-access-rbkjw\") pod \"ovnkube-control-plane-749d76644c-vgbtn\" (UID: \"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.448016 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\
\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:03Z\\\",\\\"message\\\":\\\"or *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0929 13:45:03.527327 5849 factory.go:656] Stopping watch factory\\\\nI0929 13:45:03.527359 5849 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:03.527392 5849 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 13:45:03.527440 5849 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527689 5849 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527442 5849 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 13:45:03.527135 5849 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527918 5849 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527225 5849 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527066 5849 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.460133 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:06Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.469631 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.469661 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.469670 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.469684 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.469695 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.514218 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" Sep 29 13:45:06 crc kubenswrapper[4634]: W0929 13:45:06.534835 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a9def76_5f53_4d1b_bf94_cb2fb8a89ea6.slice/crio-030f52fcec8148cdcbbc96edc225a0dcf3be244668ef24d552b2d30bc362b420 WatchSource:0}: Error finding container 030f52fcec8148cdcbbc96edc225a0dcf3be244668ef24d552b2d30bc362b420: Status 404 returned error can't find the container with id 030f52fcec8148cdcbbc96edc225a0dcf3be244668ef24d552b2d30bc362b420 Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.571981 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.572024 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.572034 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.572048 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.572060 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.674331 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.674354 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.674365 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.674407 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.674421 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.776686 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.776734 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.776746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.776763 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.776776 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.878515 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.878591 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.878603 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.878620 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.878631 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.981163 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.981210 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.981220 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.981237 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:06 crc kubenswrapper[4634]: I0929 13:45:06.981248 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:06Z","lastTransitionTime":"2025-09-29T13:45:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.083591 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.083627 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.083635 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.083649 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.083658 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.185895 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.185938 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.185948 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.185962 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.185975 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.288444 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.288501 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.288513 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.288530 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.288542 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.376013 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" event={"ID":"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6","Type":"ContainerStarted","Data":"12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.376055 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" event={"ID":"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6","Type":"ContainerStarted","Data":"1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.376064 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" event={"ID":"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6","Type":"ContainerStarted","Data":"030f52fcec8148cdcbbc96edc225a0dcf3be244668ef24d552b2d30bc362b420"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.391418 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.391514 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.391545 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.391576 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.391598 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.395976 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.409245 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.425793 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.435962 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.449446 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433
186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.463506 4634 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439
b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.478550 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.490199 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.497012 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.497069 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.497103 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.497122 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.497137 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.505360 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.517480 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.528749 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.540525 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.556486 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.567515 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.596250 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:03Z\\\",\\\"message\\\":\\\"or *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0929 13:45:03.527327 5849 factory.go:656] Stopping watch factory\\\\nI0929 13:45:03.527359 5849 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:03.527392 5849 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 13:45:03.527440 5849 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527689 5849 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527442 5849 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 13:45:03.527135 5849 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527918 5849 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527225 5849 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527066 5849 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.600015 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.600039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.600047 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.600059 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.600069 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.608945 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.632820 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-nl5xm"] Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.633431 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:07 crc kubenswrapper[4634]: E0929 13:45:07.633515 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.650349 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.663499 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"ho
stIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.684908 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177e
d0f8b1d3e27b32b5aad9f07d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:03Z\\\",\\\"message\\\":\\\"or *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0929 13:45:03.527327 5849 factory.go:656] Stopping watch factory\\\\nI0929 13:45:03.527359 5849 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:03.527392 5849 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 13:45:03.527440 5849 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527689 5849 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527442 5849 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 13:45:03.527135 5849 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527918 5849 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527225 5849 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527066 5849 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 
13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri
-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.699803 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.702746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.702780 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.702790 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.702807 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.702820 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.707067 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92zjw\" (UniqueName: \"kubernetes.io/projected/85c1b26c-a922-4d3c-934f-e6968735a76e-kube-api-access-92zjw\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.707119 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.724868 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794ee
f40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.739606 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.755038 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.766336 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.788239 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433
186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.802498 4634 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.805235    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.805265    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.805276    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.805293    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.805305    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.808423    4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92zjw\" (UniqueName: \"kubernetes.io/projected/85c1b26c-a922-4d3c-934f-e6968735a76e-kube-api-access-92zjw\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.808480    4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:07 crc kubenswrapper[4634]: E0929 13:45:07.808630    4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 13:45:07 crc kubenswrapper[4634]: E0929 13:45:07.808709    4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:45:08.308686527 +0000 UTC m=+38.877414316 (durationBeforeRetry 500ms).
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.817713 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.827169 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92zjw\" (UniqueName: \"kubernetes.io/projected/85c1b26c-a922-4d3c-934f-e6968735a76e-kube-api-access-92zjw\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.831173 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.843990 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.862894 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.877524 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.889871 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.900372 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:07Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.907926 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.907973 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.907991 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.908012 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:07 crc kubenswrapper[4634]: I0929 13:45:07.908037 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:07Z","lastTransitionTime":"2025-09-29T13:45:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.010232 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.010273 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.010282 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.010296 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.010308 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.109281 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:08 crc kubenswrapper[4634]: E0929 13:45:08.109469 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.109835 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:08 crc kubenswrapper[4634]: E0929 13:45:08.109971 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.110071 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:08 crc kubenswrapper[4634]: E0929 13:45:08.110319 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.112831 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.112909 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.112935 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.112964 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.112991 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.215957 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.215985 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.215994 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.216008 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.216018 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.312339 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:08 crc kubenswrapper[4634]: E0929 13:45:08.312445 4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:08 crc kubenswrapper[4634]: E0929 13:45:08.312512 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:45:09.312493288 +0000 UTC m=+39.881221037 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.318878 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.318906 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.318916 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.318934 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.318944 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.422844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.422900 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.422913 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.422939 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.422953 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.526721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.527105 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.527120 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.527141 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.527155 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.668062 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.668113 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.668124 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.668138 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.668167 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.771647 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.771865 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.771935 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.772014 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.772075 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.874444 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.874682 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.874757 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.874839 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.874922 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.977889 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.977949 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.977966 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.977988 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:08 crc kubenswrapper[4634]: I0929 13:45:08.978006 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:08Z","lastTransitionTime":"2025-09-29T13:45:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.081155 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.081629 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.081848 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.082075 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.082325 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.109925 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:09 crc kubenswrapper[4634]: E0929 13:45:09.110475 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.184724 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.184761 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.184799 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.184816 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.184828 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.288077 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.288478 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.288681 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.288888 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.289122 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.373947 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:09 crc kubenswrapper[4634]: E0929 13:45:09.374135 4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:09 crc kubenswrapper[4634]: E0929 13:45:09.374391 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:45:11.374373019 +0000 UTC m=+41.943100778 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.392181 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.392363 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.392401 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.392431 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.392453 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.496315 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.496443 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.496468 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.496498 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.496521 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.599832 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.599907 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.599934 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.599965 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.600024 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.702365 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.702396 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.702404 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.702417 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.702425 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.805253 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.805296 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.805307 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.805320 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.805332 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.908153 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.908193 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.908202 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.908216 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:09 crc kubenswrapper[4634]: I0929 13:45:09.908228 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:09Z","lastTransitionTime":"2025-09-29T13:45:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.011214 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.011288 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.011364 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.011398 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.011482 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.110160 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.110164 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:10 crc kubenswrapper[4634]: E0929 13:45:10.110346 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:10 crc kubenswrapper[4634]: E0929 13:45:10.110417 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.110177 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:10 crc kubenswrapper[4634]: E0929 13:45:10.110669 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.114434 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.114512 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.114557 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.114583 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.114620 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.131813 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.147311 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.183633 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177e
d0f8b1d3e27b32b5aad9f07d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a3fa5794b39c5dd918b37c6c64a73138a7712ed249140856dc3215a8766062f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:03Z\\\",\\\"message\\\":\\\"or *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI0929 13:45:03.527327 5849 factory.go:656] Stopping watch factory\\\\nI0929 13:45:03.527359 5849 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:03.527392 5849 handler.go:208] Removed *v1.Node event handler 7\\\\nI0929 13:45:03.527440 5849 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527689 5849 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527442 5849 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0929 13:45:03.527135 5849 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527918 5849 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527225 5849 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:03.527066 5849 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 
13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri
-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.202175 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.216966 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.217622 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.217662 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:10 crc 
kubenswrapper[4634]: I0929 13:45:10.217674 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.217692 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.217704 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.251220 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.270424 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.284435 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.293970 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.309705 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.319966 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:10 crc 
kubenswrapper[4634]: I0929 13:45:10.320008 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.320017 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.320044 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.320055 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.322136 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"n
ame\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.335175 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-con
troller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.347928 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.364022 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.378974 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.395001 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.406706 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:10Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.422486 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.422521 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.422535 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.422568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.422581 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.524605 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.524643 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.524654 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.524668 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.524678 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.626613 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.626644 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.626653 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.626666 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.626674 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.729296 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.729347 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.729361 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.729383 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.729397 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.831908 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.831969 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.831987 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.832011 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.832027 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.934842 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.934902 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.934919 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.934942 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:10 crc kubenswrapper[4634]: I0929 13:45:10.934963 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:10Z","lastTransitionTime":"2025-09-29T13:45:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.038051 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.038155 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.038180 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.038215 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.038236 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.109952 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:11 crc kubenswrapper[4634]: E0929 13:45:11.110520 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.140060 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.140154 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.140181 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.140211 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.140229 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.243141 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.243207 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.243231 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.243276 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.243298 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.346813 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.346972 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.347001 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.347028 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.347064 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.394056 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:11 crc kubenswrapper[4634]: E0929 13:45:11.394263 4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:11 crc kubenswrapper[4634]: E0929 13:45:11.394356 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:45:15.394330213 +0000 UTC m=+45.963058002 (durationBeforeRetry 4s). 
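Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered

Annotation: the nestedpendingoperations entry above schedules the next mount attempt for 13:45:15.394, 4 s after the failure at 13:45:11.394, and labels the delay "durationBeforeRetry 4s". That is the signature of a doubling backoff: assuming the kubelet's commonly cited 500 ms initial delay and roughly two-minute cap (both assumptions here, not stated in this log), a 4 s delay corresponds to the fourth consecutive failure of this volume operation (0.5 s, 1 s, 2 s, 4 s). A sketch under those assumptions:

    // backoff.go - a sketch of the doubling retry delay implied by
    // "durationBeforeRetry 4s". The 500 ms seed and 2m2s cap are assumed
    // constants, not values taken from this log.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const (
            initialDelay = 500 * time.Millisecond    // assumed seed
            maxDelay     = 2*time.Minute + 2*time.Second // assumed cap
        )
        delay := initialDelay
        for attempt := 1; attempt <= 10; attempt++ {
            fmt.Printf("failure %2d -> wait %v before retry\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
        // failure 4 -> wait 4s, matching the entry above.
    }

Note the failure reason is "not registered", not "not found": the secret likely exists, but the kubelet's object cache has not registered it yet after this restart, so the retry loop typically resolves on its own once the informers catch up.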
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.450645 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.450705 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.450727 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.450755 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.450778 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.554136 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.554200 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.554225 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.554251 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.554274 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.657046 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.657132 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.657140 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.657153 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.657162 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.759998 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.760142 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.760168 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.760209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.760222 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.862178 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.862236 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.862249 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.862265 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.862276 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.964841 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.965143 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.965269 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.965359 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:11 crc kubenswrapper[4634]: I0929 13:45:11.965444 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:11Z","lastTransitionTime":"2025-09-29T13:45:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.068029 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.068065 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.068076 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.068113 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.068124 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.109736 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.109786 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:12 crc kubenswrapper[4634]: E0929 13:45:12.109868 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.109893 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:12 crc kubenswrapper[4634]: E0929 13:45:12.110151 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:12 crc kubenswrapper[4634]: E0929 13:45:12.110022 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.169892 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.169916 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.169925 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.169937 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.169945 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.273164 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.273232 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.273256 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.273284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.273304 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.376142 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.376205 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.376227 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.376257 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.376279 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.478535 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.478582 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.478593 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.478611 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.478624 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.580673 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.580703 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.580710 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.580722 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.580730 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.683261 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.683337 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.683361 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.683395 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.683420 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.785940 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.785980 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.785990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.786007 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.786018 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.889617 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.889671 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.889690 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.889746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.889764 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.992434 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.992492 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.992513 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.992544 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:12 crc kubenswrapper[4634]: I0929 13:45:12.992569 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:12Z","lastTransitionTime":"2025-09-29T13:45:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.094944 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.094977 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.094990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.095008 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.095022 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.109255 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:13 crc kubenswrapper[4634]: E0929 13:45:13.109413 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
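pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"

Annotation: every "Node became not ready" and "Error syncing pod, skipping" entry in this stretch traces back to the same readiness test: the runtime reports NetworkReady=false while /etc/kubernetes/cni/net.d/ contains no CNI network configuration, and the kubelet refuses to build new pod sandboxes until one appears. A minimal Go sketch of that directory check follows; the accepted extensions (.conf, .conflist, .json) mirror what libcni's config loader is generally understood to scan for and are an assumption here, not a quote of the kubelet source.

    // cnicheck.go - a sketch of the readiness test the kubelet keeps
    // failing above: is there at least one CNI network config on disk?
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        var configs []string
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            // Assumed extension list, modeled on libcni's ConfFiles.
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                configs = append(configs, e.Name())
            }
        }
        if len(configs) == 0 {
            // The state this node is in: NetworkReady=false until the
            // network operator writes a config here.
            fmt.Printf("no CNI configuration file in %s\n", confDir)
            os.Exit(1)
        }
        fmt.Printf("found CNI configs: %v\n", configs)
    }

This also explains why only sandbox creation is blocked: already-running static pods such as kube-apiserver-crc keep running on host networking, while network-metrics-daemon-nl5xm and the diagnostics pods stay in ContainerCreating.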
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.197943 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.198291 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.198433 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.198627 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.198759 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.302618 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.302697 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.302717 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.302743 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.302761 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.405599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.405672 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.405692 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.405715 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.405732 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.508889 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.508950 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.508963 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.508987 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.509001 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.612346 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.612458 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.612480 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.612525 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.612545 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.715319 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.715374 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.715393 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.715421 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.715441 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.819472 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.819735 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.819825 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.819907 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.819985 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.922844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.922887 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.922902 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.922920 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:13 crc kubenswrapper[4634]: I0929 13:45:13.922935 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:13Z","lastTransitionTime":"2025-09-29T13:45:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.025196 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.025233 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.025241 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.025255 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.025264 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.109501 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.109601 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:14 crc kubenswrapper[4634]: E0929 13:45:14.109638 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.109689 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:14 crc kubenswrapper[4634]: E0929 13:45:14.109828 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:14 crc kubenswrapper[4634]: E0929 13:45:14.109945 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
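pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"

Annotation: for anyone grepping this stream, each kubenswrapper entry carries a klog-style header (severity letter I/W/E/F, MMDD date, wall-clock time with microseconds, PID, then source file:line), so the repeated blocks above can be split apart mechanically. The regex below is fitted to the lines in this log and is an assumption, not an official klog grammar.

    // klogparse.go - a sketch that splits a kubelet entry from this log
    // into its klog header fields.
    package main

    import (
        "fmt"
        "regexp"
    )

    var header = regexp.MustCompile(
        `^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6})\s+(\d+)\s+([\w.]+:\d+)\] (.*)$`)

    func main() {
        // Sample taken verbatim from an entry above.
        line := `E0929 13:45:14.109638 4634 pod_workers.go:1301] "Error syncing pod, skipping"`
        m := header.FindStringSubmatch(line)
        if m == nil {
            fmt.Println("no match")
            return
        }
        fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s msg=%s\n",
            m[1], m[2], m[3], m[4], m[5], m[6])
    }

Counting matches by source file:line (here kubelet_node_status.go:724 and setters.go:603) makes the ~100 ms cadence of the NodeNotReady block easy to see.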
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.127376 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.127404 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.127414 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.127428 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.127439 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.230256 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.230321 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.230338 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.230367 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.230386 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.332476 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.332516 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.332531 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.332551 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.332563 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.435739 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.435774 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.435783 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.435797 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.435807 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.538061 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.538135 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.538147 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.538162 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.538171 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.640947 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.640981 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.640990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.641004 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.641013 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.743947 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.743998 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.744008 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.744023 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.744036 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.847248 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.847316 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.847328 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.847346 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.847357 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.950485 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.950562 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.950579 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.950602 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:14 crc kubenswrapper[4634]: I0929 13:45:14.950618 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:14Z","lastTransitionTime":"2025-09-29T13:45:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.054549 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.054964 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.055057 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.055166 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.055259 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.109334 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:15 crc kubenswrapper[4634]: E0929 13:45:15.109603 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.159385 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.159448 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.159464 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.159486 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.159509 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.262394 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.262456 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.262475 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.262499 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.262516 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.365826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.365882 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.365905 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.365933 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.365956 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.436658 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:15 crc kubenswrapper[4634]: E0929 13:45:15.436856 4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:15 crc kubenswrapper[4634]: E0929 13:45:15.436969 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:45:23.436940987 +0000 UTC m=+54.005668776 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.468526 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.468624 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.468650 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.468711 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.468734 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.576260 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.576335 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.576357 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.576385 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.576405 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.679504 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.679546 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.679555 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.679571 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.679581 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.782855 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.782895 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.782904 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.782921 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.782930 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.886059 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.886177 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.886231 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.886256 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.886274 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.988694 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.988721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.988733 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.988747 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:15 crc kubenswrapper[4634]: I0929 13:45:15.988757 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:15Z","lastTransitionTime":"2025-09-29T13:45:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.091242 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.091315 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.091341 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.091368 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.091428 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.109660 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.109738 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.109682 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.109897 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.110022 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.110189 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.186124 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.186192 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.186209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.186234 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.186252 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.205269 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:16Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.209683 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.209735 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.209752 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.209778 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.209795 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.222724 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:16Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.227228 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.227310 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.227328 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.227377 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.227395 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.244671 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:16Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.249464 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.249522 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.249541 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.249566 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.249586 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.268979 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:16Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.273726 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.273799 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.273811 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.273826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.274235 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.292997 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:16Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:16 crc kubenswrapper[4634]: E0929 13:45:16.293273 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.295000 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
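The retry sequence above, ending in "update node status exceeds retry count" (the kubelet gives up after a fixed number of attempts, five in recent upstream kubelet), has a single root cause: the serving certificate of the network-node-identity webhook on 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, more than a month before the clock time in the log, a pattern consistent with a cluster image resumed long after its certificates were minted. A minimal Go sketch of the kind of probe that confirms this, assuming the webhook endpoint from the log is still listening (this is an illustrative diagnostic, not kubelet code):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the failing webhook POST in the log above.
	// InsecureSkipVerify lets us retrieve the certificate even though
	// normal verification fails with "certificate has expired".
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("notBefore=%s notAfter=%s now=%s\n",
		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339),
		now.Format(time.RFC3339))
	if now.After(cert.NotAfter) {
		// The condition the kubelet keeps hitting:
		// "current time ... is after 2025-08-24T17:21:41Z".
		fmt.Printf("certificate expired %s ago\n", now.Sub(cert.NotAfter).Round(time.Minute))
	}
}
```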
event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.295059 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.295077 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.295149 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.295170 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.397309 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.397353 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.397366 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.397383 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.397395 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.499495 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.499558 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.499576 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.499598 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.499653 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.602215 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.602268 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.602299 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.602317 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.602328 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.705452 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.705585 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.705611 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.705649 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.705669 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.808809 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.808869 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.808889 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.808915 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.808933 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.912052 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.912149 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.912171 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.912194 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:16 crc kubenswrapper[4634]: I0929 13:45:16.912212 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:16Z","lastTransitionTime":"2025-09-29T13:45:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.014865 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.014929 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.014946 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.014969 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.014986 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.109512 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:17 crc kubenswrapper[4634]: E0929 13:45:17.109762 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.117564 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.117617 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.117630 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.117648 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.117657 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.220532 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.220597 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.220621 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.220651 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.220676 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.324186 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.324254 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.324287 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.324312 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.324332 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.428714 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.428781 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.428806 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.428835 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.428857 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.532765 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.532842 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.532868 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.532898 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.532941 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.635801 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.635857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.635877 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.635901 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.635919 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.739631 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.739721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.739737 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.739761 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.739809 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.843617 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.843667 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.843678 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.843701 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.843713 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.915556 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.916737 4634 scope.go:117] "RemoveContainer" containerID="119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.932506 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.
168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:17Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.946510 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.946573 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.946595 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.946626 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.946646 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:17Z","lastTransitionTime":"2025-09-29T13:45:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.963682 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177e
d0f8b1d3e27b32b5aad9f07d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:17Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.976294 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:17Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:17 crc kubenswrapper[4634]: I0929 13:45:17.990550 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\
\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:17Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.003899 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.016582 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.031838 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.047775 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433
186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.049663 4634 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.049828 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.049914 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.050018 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.050136 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.077955 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"resta
rtCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":
{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.092257 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.109481 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.109507 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:18 crc kubenswrapper[4634]: E0929 13:45:18.109610 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.109946 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:18 crc kubenswrapper[4634]: E0929 13:45:18.110007 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:18 crc kubenswrapper[4634]: E0929 13:45:18.110055 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.117137 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.133523 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.143321 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 
13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.152698 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.152797 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.152813 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.152831 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.152843 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.154348 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 
13:45:18.164842 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.176856 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.189376 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.254721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.254755 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.254766 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.254781 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.254793 4634 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.356715 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.356739 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.356746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.356760 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.356768 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.415567 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/1.log" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.418183 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.418664 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.433289 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.445787 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.458411 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.458660 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.458689 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.458698 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.458712 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.458721 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.476782 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.492022 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.503125 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.515272 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.528130 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.538125 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.554783 4634 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.561126 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.561174 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.561188 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.561207 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.561222 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.567399 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.579610 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.591467 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.603744 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.612274 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.624102 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.645429 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:18Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.662948 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.662971 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.662978 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.662990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.662998 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.766426 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.766684 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.766694 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.766707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.766715 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.869243 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.869327 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.869346 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.869799 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.869860 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.971915 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.971973 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.971990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.972013 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:18 crc kubenswrapper[4634]: I0929 13:45:18.972040 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:18Z","lastTransitionTime":"2025-09-29T13:45:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.075715 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.075781 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.075798 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.075821 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.075839 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.109276 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:19 crc kubenswrapper[4634]: E0929 13:45:19.109734 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.179055 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.179159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.179178 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.179202 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.179219 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.282459 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.282539 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.282558 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.282581 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.282597 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.384510 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.384563 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.384581 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.384605 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.384626 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.423901 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/2.log" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.424755 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/1.log" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.428682 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66" exitCode=1 Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.428735 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.428780 4634 scope.go:117] "RemoveContainer" containerID="119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.429915 4634 scope.go:117] "RemoveContainer" containerID="ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66" Sep 29 13:45:19 crc kubenswrapper[4634]: E0929 13:45:19.430205 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.469966 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.489103 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.489147 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.489162 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.489221 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.489239 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.491606 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.512432 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.523801 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.541914 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-co
py\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.559202 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.578033 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac
0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.591817 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.592522 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.592567 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.592579 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.592597 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.592609 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.608517 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.625843 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.647149 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.662216 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.675410 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.696260 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.696331 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.696342 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.696386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.696401 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.696483 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.713646 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.732215 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee421
85cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 
services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"
initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.741717 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.745910 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.756076 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.769272 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.789022 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.790963 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.798739 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.798767 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.798794 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.798809 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.798818 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.801689 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.802785 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.811392 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.820666 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.830481 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.838455 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.853663 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.861908 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.872675 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.883150 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.892351 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.901226 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.901264 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.901274 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.901287 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.901299 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:19Z","lastTransitionTime":"2025-09-29T13:45:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.903187 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.914704 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.923609 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.940393 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed 
to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built 
ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.952271 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4
ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.963795 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.974617 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.984842 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 
13:45:19 crc kubenswrapper[4634]: I0929 13:45:19.996323 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:19Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.003256 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.003280 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.003288 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.003301 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.003309 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.008935 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.021679 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.032983 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.047574 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.059537 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.086947 4634 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), 
V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631a
a118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.100819 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.105112 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.105138 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.105146 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.105159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.105168 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.109803 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:20 crc kubenswrapper[4634]: E0929 13:45:20.109991 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.110296 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:20 crc kubenswrapper[4634]: E0929 13:45:20.110430 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.110546 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:20 crc kubenswrapper[4634]: E0929 13:45:20.110670 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.116231 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.128177 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.140400 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.153248 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.171614 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433
186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.203670 4634 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c8
0d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.210506 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.210543 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.210557 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.210574 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.210587 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.219760 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.230763 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.244682 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.263866 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.276680 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.289203 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.310014 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.312641 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.312681 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.312696 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.312716 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.312732 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.324673 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.345334 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.358568 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.371102 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.381253 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.391950 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.404281 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.415492 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.415537 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.415549 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.415568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.415580 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.421278 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://119cb4981db6a6264eb26a8b544b61082b26177ed0f8b1d3e27b32b5aad9f07d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:05Z\\\",\\\"message\\\":\\\"5028 5981 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035222 5981 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.035663 5981 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.036371 5981 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 13:45:05.040461 5981 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0929 13:45:05.040497 5981 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0929 13:45:05.040518 5981 factory.go:656] Stopping watch factory\\\\nI0929 13:45:05.040537 5981 handler.go:208] Removed *v1.Node event handler 2\\\\nI0929 13:45:05.040545 5981 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0929 13:45:05.058869 5981 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0929 13:45:05.058887 5981 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0929 13:45:05.058923 5981 ovnkube.go:599] Stopped ovnkube\\\\nI0929 13:45:05.058940 5981 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0929 13:45:05.059003 5981 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built 
ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.433483 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/2.log" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.434235 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.437662 4634 scope.go:117] "RemoveContainer" containerID="ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66" Sep 29 13:45:20 crc kubenswrapper[4634]: E0929 13:45:20.437919 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.446781 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.457004 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.470003 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.483430 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.497056 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.509641 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.517277 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.517312 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.517320 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.517334 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.517345 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.524797 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.535276 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.543888 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.585236 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.620455 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.620544 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.620561 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.620585 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.620603 4634 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.627782 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.664321 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.701695 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.723709 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.723772 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.723782 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.723797 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.723810 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.745545 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.783594 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.826200 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.826803 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.826877 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.826899 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.826933 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.826957 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.862318 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.910765 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.934118 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.934377 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:20 crc 
kubenswrapper[4634]: I0929 13:45:20.935704 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.935735 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.935755 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:20Z","lastTransitionTime":"2025-09-29T13:45:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.947534 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:20 crc kubenswrapper[4634]: I0929 13:45:20.986869 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:20Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.038836 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.038872 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.038881 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.038918 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.038928 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.110071 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.110283 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.141710 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.141759 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.141775 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.141799 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.141815 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.244629 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.244677 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.244692 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.244711 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.244724 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.348159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.348216 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.348409 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.348434 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.348451 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.450819 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.450884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.450905 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.450932 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.450953 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.553327 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.553713 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.553990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.554219 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.554453 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.658415 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.658468 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.658486 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.658506 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.658522 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.761338 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.761925 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.762274 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.763000 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.763356 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.806934 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.807004 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.807038 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.807080 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807256 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807318 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:53.807299668 +0000 UTC m=+84.376027427 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807516 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:45:53.807505403 +0000 UTC m=+84.376233162 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807595 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807609 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807623 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807652 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:53.807643807 +0000 UTC m=+84.376371566 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.807980 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.808015 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:53.808004447 +0000 UTC m=+84.376732216 (durationBeforeRetry 32s).
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.865856 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.866077 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.866181 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.866299 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.866382 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.907835 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.907987 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.908019 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.908036 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:45:21 crc kubenswrapper[4634]: E0929 13:45:21.908124 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:45:53.908102353 +0000 UTC m=+84.476830122 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.969196 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.969227 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.969235 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.969247 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:21 crc kubenswrapper[4634]: I0929 13:45:21.969255 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:21Z","lastTransitionTime":"2025-09-29T13:45:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.072120 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.072179 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.072196 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.072222 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.072240 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.109256 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.109350 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:22 crc kubenswrapper[4634]: E0929 13:45:22.109437 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.109469 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:22 crc kubenswrapper[4634]: E0929 13:45:22.109565 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:22 crc kubenswrapper[4634]: E0929 13:45:22.109637 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.175103 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.175129 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.175139 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.175153 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.175164 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.277928 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.277974 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.277992 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.278018 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.278034 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.380895 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.381391 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.381598 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.381838 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.382039 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.485564 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.485896 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.486170 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.486426 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.486646 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.589212 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.589260 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.589279 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.589298 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.589312 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.692435 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.692847 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.693120 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.693358 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.693576 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.796384 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.796443 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.796454 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.796468 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.796479 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.899207 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.899242 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.899270 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.899299 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:22 crc kubenswrapper[4634]: I0929 13:45:22.899310 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:22Z","lastTransitionTime":"2025-09-29T13:45:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.002578 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.003024 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.003397 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.003571 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.003706 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.106831 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.106884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.106901 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.106925 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.106941 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.110208 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:23 crc kubenswrapper[4634]: E0929 13:45:23.110428 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.209556 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.209609 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.209626 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.209648 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.209669 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.312483 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.312567 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.312898 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.312932 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.312949 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.415834 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.415894 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.415917 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.415944 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.415964 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.518562 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.518640 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.518665 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.518693 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.518715 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.521489 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:23 crc kubenswrapper[4634]: E0929 13:45:23.521702 4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:23 crc kubenswrapper[4634]: E0929 13:45:23.521809 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:45:39.521775281 +0000 UTC m=+70.090503070 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.621914 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.621985 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.622002 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.622026 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.622042 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.725641 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.725699 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.725717 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.725784 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.725802 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.829647 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.829703 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.829721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.829744 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.829761 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.933159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.933226 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.933251 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.933280 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:23 crc kubenswrapper[4634]: I0929 13:45:23.933302 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:23Z","lastTransitionTime":"2025-09-29T13:45:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.036258 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.036320 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.036340 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.036369 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.036387 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.109197 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.109210 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.109316 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:24 crc kubenswrapper[4634]: E0929 13:45:24.109369 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:24 crc kubenswrapper[4634]: E0929 13:45:24.109579 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:24 crc kubenswrapper[4634]: E0929 13:45:24.109710 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.139220 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.139282 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.139323 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.139350 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.139370 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.243220 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.243636 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.243885 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.244148 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.244372 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.347419 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.347478 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.347499 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.347523 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.347542 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.449348 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.449387 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.449400 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.449419 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.449434 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.552546 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.552590 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.552606 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.552625 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.552639 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.655349 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.655398 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.655415 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.655435 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.655451 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.757783 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.757843 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.757860 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.757885 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.757904 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.860046 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.860099 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.860110 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.860123 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.860131 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.962641 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.962672 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.962682 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.962712 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:24 crc kubenswrapper[4634]: I0929 13:45:24.962722 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:24Z","lastTransitionTime":"2025-09-29T13:45:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.065568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.065623 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.065640 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.065660 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.065671 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.109910 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:25 crc kubenswrapper[4634]: E0929 13:45:25.110303 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.168708 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.169062 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.169284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.169592 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.169765 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.272963 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.273023 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.273039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.273062 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.273076 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.375972 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.376016 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.376029 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.376047 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.376059 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.479492 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.479582 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.479607 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.479658 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.479681 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.583584 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.583674 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.583690 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.583716 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.583733 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.686599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.686708 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.686731 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.686764 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.686789 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.789701 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.789740 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.789765 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.789779 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.789788 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.892553 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.892647 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.892658 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.892699 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.892714 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.995232 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.995275 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.995287 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.995303 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:25 crc kubenswrapper[4634]: I0929 13:45:25.995315 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:25Z","lastTransitionTime":"2025-09-29T13:45:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.098220 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.098260 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.098271 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.098287 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.098299 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.109817 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.109882 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.109969 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.109817 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.110197 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.110333 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.200789 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.200865 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.200889 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.200921 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.200947 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.303718 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.303758 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.303769 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.303784 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.303794 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.406461 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.406526 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.406535 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.406582 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.406594 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.509707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.509763 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.509776 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.509796 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.509807 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.529461 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.529522 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.529542 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.529568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.529586 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.548711 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:26Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.553744 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.553800 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.553819 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.553843 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.553859 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.571789 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:26Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.576054 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.576095 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.576104 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.576119 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.576132 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.594388 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:26Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.599133 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.599187 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.599297 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.599325 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.599342 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.616810 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:26Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.621410 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.621442 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.621455 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.621472 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.621487 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.640034 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:26Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:26 crc kubenswrapper[4634]: E0929 13:45:26.640222 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.642050 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.642075 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.642106 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.642121 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.642132 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.745781 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.745836 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.745852 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.745874 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.745890 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.848059 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.848159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.848181 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.848209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.848230 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.951540 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.951584 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.951599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.951623 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:26 crc kubenswrapper[4634]: I0929 13:45:26.951639 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:26Z","lastTransitionTime":"2025-09-29T13:45:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.054682 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.054863 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.054881 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.054915 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.054933 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.109926 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:27 crc kubenswrapper[4634]: E0929 13:45:27.110152 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.159989 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.160074 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.160146 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.160192 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.160210 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.263386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.263445 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.263463 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.263488 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.263508 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.366143 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.366221 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.366238 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.366270 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.366288 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.468107 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.468201 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.468226 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.468290 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.468313 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.570653 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.570707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.570721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.570741 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.570756 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.674308 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.674378 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.674395 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.674419 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.674436 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.777646 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.777693 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.777707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.777724 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.777737 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.880997 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.881033 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.881045 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.881102 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.881116 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.984439 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.984493 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.984510 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.984534 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:27 crc kubenswrapper[4634]: I0929 13:45:27.984552 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:27Z","lastTransitionTime":"2025-09-29T13:45:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.087909 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.087971 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.087989 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.088014 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.088036 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.109250 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.109469 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:28 crc kubenswrapper[4634]: E0929 13:45:28.109676 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.109798 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:28 crc kubenswrapper[4634]: E0929 13:45:28.109887 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:28 crc kubenswrapper[4634]: E0929 13:45:28.109995 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.190722 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.190766 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.190780 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.190798 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.190810 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.294174 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.294223 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.294244 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.294270 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.294288 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.397279 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.397320 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.397332 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.397348 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.397360 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.501053 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.501497 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.501692 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.501889 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.502138 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.605679 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.606133 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.606288 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.606492 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.606660 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.709735 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.709788 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.709799 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.709814 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.709824 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.812793 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.813165 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.813335 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.813482 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.813778 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.916911 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.916957 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.916969 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.916986 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:28 crc kubenswrapper[4634]: I0929 13:45:28.917002 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:28Z","lastTransitionTime":"2025-09-29T13:45:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.019541 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.019840 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.019995 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.020172 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.020342 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.109924 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:29 crc kubenswrapper[4634]: E0929 13:45:29.111028 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.127730 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.127767 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.127775 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.127793 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.127805 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.230461 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.230500 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.230511 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.230527 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.230538 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.333181 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.333223 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.333235 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.333249 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.333257 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.435754 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.435801 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.435821 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.435844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.435860 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.538594 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.538632 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.538643 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.538659 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.538669 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.641593 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.641648 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.641666 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.641691 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.641708 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.743886 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.743974 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.743991 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.744039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.744057 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.847454 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.847493 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.847510 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.847533 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.847550 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.949620 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.949736 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.949754 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.949776 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:29 crc kubenswrapper[4634]: I0929 13:45:29.949794 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:29Z","lastTransitionTime":"2025-09-29T13:45:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.052549 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.052973 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.052991 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.053035 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.053054 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:30Z","lastTransitionTime":"2025-09-29T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.109568 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.109577 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.109880 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:30 crc kubenswrapper[4634]: E0929 13:45:30.109767 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:30 crc kubenswrapper[4634]: E0929 13:45:30.110537 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:30 crc kubenswrapper[4634]: E0929 13:45:30.110624 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.127472 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.146873 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-
cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.155524 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.155566 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.155578 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.155598 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.155611 4634 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:30Z","lastTransitionTime":"2025-09-29T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.165235 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.183565 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.199412 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.212317 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.226703 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.252435 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\
\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 
13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.262945 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.263429 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.263749 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.264059 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.264467 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:30Z","lastTransitionTime":"2025-09-29T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.273746 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.289854 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.319434 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.333562 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.346070 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.360492 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.366786 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.366836 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.366849 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.366868 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.366880 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:30Z","lastTransitionTime":"2025-09-29T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.373450 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.385214 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 
13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.405675 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.426364 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:30Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.469398 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.469563 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.469702 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 
13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.469841 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.469974 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:30Z","lastTransitionTime":"2025-09-29T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.572070 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.572438 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.572582 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.572737 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.572868 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:30Z","lastTransitionTime":"2025-09-29T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.675716 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.675773 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.675790 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.675812 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:30 crc kubenswrapper[4634]: I0929 13:45:30.675829 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:30Z","lastTransitionTime":"2025-09-29T13:45:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[the node-status burst repeats at 13:45:30.779, 13:45:30.882, 13:45:30.985, and 13:45:31.088]
Sep 29 13:45:31 crc kubenswrapper[4634]: I0929 13:45:31.109896 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:31 crc kubenswrapper[4634]: E0929 13:45:31.110164 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
[the node-status burst repeats at 13:45:31.190, 13:45:31.292, 13:45:31.394, 13:45:31.500, 13:45:31.603, 13:45:31.706, and 13:45:31.808]
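[editor's note: every "Node became not ready" condition above carries the same root cause string: no CNI configuration file in /etc/kubernetes/cni/net.d/. A rough Go sketch of that readiness probe, assuming it reduces to "does the conf dir contain at least one CNI config file"; the real kubelet/CRI-O check also parses and validates the file contents:]

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// networkReady reports whether confDir holds at least one CNI config file.
// The accepted extensions are an assumption for this sketch.
func networkReady(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := networkReady("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file found; has your network provider started?")
		return
	}
	fmt.Println("NetworkReady=true")
}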
[the node-status burst repeats at 13:45:31.910 and 13:45:32.015]
Sep 29 13:45:32 crc kubenswrapper[4634]: I0929 13:45:32.110042 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:45:32 crc kubenswrapper[4634]: I0929 13:45:32.110042 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:45:32 crc kubenswrapper[4634]: E0929 13:45:32.110195 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 13:45:32 crc kubenswrapper[4634]: I0929 13:45:32.110230 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:45:32 crc kubenswrapper[4634]: E0929 13:45:32.110312 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:45:32 crc kubenswrapper[4634]: E0929 13:45:32.110384 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[the node-status burst repeats at 13:45:32.116 and 13:45:32.219]
[the node-status burst repeats at 13:45:32.322, 13:45:32.424, 13:45:32.528, 13:45:32.631, 13:45:32.734, 13:45:32.839, 13:45:32.942, and 13:45:33.046]
Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.109566 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:33 crc kubenswrapper[4634]: E0929 13:45:33.109783 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
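[editor's note: the "No sandbox for pod can be found" / "Error syncing pod, skipping" pairs show the kubelet refusing to create new sandboxes while the runtime network is NotReady. A Go sketch of that gate; the exemption for host-network pods is general kubelet behavior assumed here, not something this excerpt shows:]

package main

import (
	"errors"
	"fmt"
)

// canCreateSandbox sketches the gate behind these entries: while the
// runtime network is NotReady, only host-network pods may get a new
// sandbox; everything else is requeued with "Error syncing pod, skipping".
func canCreateSandbox(networkReady, hostNetwork bool) error {
	if networkReady || hostNetwork {
		return nil
	}
	return errors.New("network is not ready: container runtime network not ready: NetworkReady=false")
}

func main() {
	// network-metrics-daemon-nl5xm is a regular pod-network pod, so its
	// sync attempt keeps being skipped until a CNI config appears.
	if err := canCreateSandbox(false, false); err != nil {
		fmt.Println("Error syncing pod, skipping:", err)
	}
	// A host-network pod would proceed even with NetworkReady=false
	// (assumed behavior, see note above).
	if err := canCreateSandbox(false, true); err == nil {
		fmt.Println("host-network pod: sandbox creation may proceed")
	}
}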
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.149936 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.150034 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.150056 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.150121 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.150142 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.253430 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.253499 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.253515 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.253538 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.253554 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.357707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.357754 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.357766 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.357785 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.357797 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.461243 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.461306 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.461324 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.461351 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.461368 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.565274 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.565371 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.565392 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.565423 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.565441 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.668421 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.668511 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.668530 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.668554 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.668573 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.771837 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.771884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.771896 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.771913 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.771923 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.874759 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.874811 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.874821 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.874842 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.874855 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.977760 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.977807 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.977822 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.977840 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:33 crc kubenswrapper[4634]: I0929 13:45:33.977853 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:33Z","lastTransitionTime":"2025-09-29T13:45:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.081003 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.081052 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.081062 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.081076 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.081097 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:34Z","lastTransitionTime":"2025-09-29T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.109818 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.109908 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.109828 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:34 crc kubenswrapper[4634]: E0929 13:45:34.109997 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:34 crc kubenswrapper[4634]: E0929 13:45:34.110193 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:34 crc kubenswrapper[4634]: E0929 13:45:34.110297 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.111007 4634 scope.go:117] "RemoveContainer" containerID="ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66" Sep 29 13:45:34 crc kubenswrapper[4634]: E0929 13:45:34.111281 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.183965 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.184021 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.184031 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.184048 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:34 crc kubenswrapper[4634]: I0929 13:45:34.184057 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:34Z","lastTransitionTime":"2025-09-29T13:45:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[the node-status burst repeats at 13:45:34.287, 13:45:34.389, 13:45:34.493, 13:45:34.596, 13:45:34.698, 13:45:34.801, 13:45:34.904, and 13:45:35.007]
Sep 29 13:45:35 crc kubenswrapper[4634]: I0929 13:45:35.109140 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:35 crc kubenswrapper[4634]: E0929 13:45:35.109253 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
[the node-status burst repeats at 13:45:35.110, 13:45:35.212, 13:45:35.314, 13:45:35.417, 13:45:35.520, 13:45:35.622, 13:45:35.725, and 13:45:35.827, the final setters.go:603 entry being truncated at this point in the capture]
Has your network provider started?"} Sep 29 13:45:35 crc kubenswrapper[4634]: I0929 13:45:35.930926 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:35 crc kubenswrapper[4634]: I0929 13:45:35.930984 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:35 crc kubenswrapper[4634]: I0929 13:45:35.931001 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:35 crc kubenswrapper[4634]: I0929 13:45:35.931027 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:35 crc kubenswrapper[4634]: I0929 13:45:35.931043 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:35Z","lastTransitionTime":"2025-09-29T13:45:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.033286 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.033336 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.033354 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.033377 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.033401 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.109607 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.109632 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.109598 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.109727 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.109796 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.109926 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.135353 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.135392 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.135403 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.135418 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.135428 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.238577 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.238626 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.238643 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.238668 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.238685 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.340558 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.340601 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.340611 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.340623 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.340634 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.443256 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.443310 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.443323 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.443353 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.443363 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.545845 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.545927 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.545944 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.545967 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.545984 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.648303 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.648349 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.648358 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.648374 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.648383 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.751060 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.751123 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.751134 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.751151 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.751162 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.853051 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.853104 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.853113 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.853127 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.853136 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
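[Triage note: the repeated NotReady condition above comes from the kubelet's network-readiness probe, which keeps reporting NetworkReady=false until at least one CNI network config exists under /etc/kubernetes/cni/net.d/; until then no pod sandbox can be created, which is why the "No sandbox for pod" and "Error syncing pod" entries recur. The Go sketch below mimics that check against the directory named in the log. It is an illustration under those assumptions, not the kubelet's actual implementation.]

```go
// cnicheck.go -- a minimal sketch (not the kubelet's code) of the readiness test
// behind "no CNI configuration file in /etc/kubernetes/cni/net.d/": look for at
// least one network config in the conf dir and report NetworkReady accordingly.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log above
	var found []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
		if err != nil {
			fmt.Fprintln(os.Stderr, "bad pattern:", err)
			os.Exit(2)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		// This is the state the kubelet keeps logging: the network plugin is
		// not ready, the node stays NotReady, and sandboxes cannot start.
		fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
		os.Exit(1)
	}
	fmt.Println("NetworkReady=true: found", found)
}
```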
Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.895886 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:36Z is after 2025-08-24T17:21:41Z"
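[Triage note: every node-status patch in this log fails because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a TLS certificate that expired on 2025-08-24T17:21:41Z, well before the current time in the error text. The Go sketch below shows one way to inspect that endpoint's certificate validity window; the address and port are taken from the error message, and the program is illustrative, not part of any cluster tooling.]

```go
// certcheck.go -- a minimal sketch for reproducing the x509 failure above:
// connect to the webhook endpoint and compare the presented leaf certificate's
// validity window against the current time.
package main

import (
	"crypto/tls"
	"fmt"
	"os"
	"time"
)

func main() {
	// Endpoint taken from the log's error text; adjust for other clusters.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // we want to inspect the cert, not trust it
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, "dial:", err)
		os.Exit(2)
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		fmt.Fprintln(os.Stderr, "no peer certificate presented")
		os.Exit(2)
	}
	leaf := state.PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s\n", leaf.Subject,
		leaf.NotBefore.UTC().Format(time.RFC3339), leaf.NotAfter.UTC().Format(time.RFC3339))
	if time.Now().UTC().After(leaf.NotAfter) {
		// Matches the log: "current time ... is after 2025-08-24T17:21:41Z".
		fmt.Println("certificate has expired; node status patches will keep failing")
		os.Exit(1)
	}
}
```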
event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.899942 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.899958 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.899971 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.916464 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:36Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.919424 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.919445 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.919453 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.919465 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.919474 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.931506 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:36Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.934785 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.934842 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.934850 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.934863 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.934872 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.945695 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:36Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.949271 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.949297 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.949305 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.949332 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.949341 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.959832 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:36Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:36 crc kubenswrapper[4634]: E0929 13:45:36.959986 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.961262 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.961280 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.961287 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.961297 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:36 crc kubenswrapper[4634]: I0929 13:45:36.961305 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:36Z","lastTransitionTime":"2025-09-29T13:45:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.064007 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.064053 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.064067 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.064108 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.064131 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.109712 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:37 crc kubenswrapper[4634]: E0929 13:45:37.109910 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.166295 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.166351 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.166361 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.166373 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.166381 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.269025 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.269069 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.269096 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.269112 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.269121 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.370896 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.370922 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.370930 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.370943 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.370951 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.472857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.472899 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.472911 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.472933 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.472946 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.574713 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.574757 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.574766 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.574781 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.574790 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.677248 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.677316 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.677330 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.677349 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.677362 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.780039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.780115 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.780128 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.780143 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.780154 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.882056 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.882108 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.882118 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.882132 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.882141 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.984687 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.984727 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.984739 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.984757 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:37 crc kubenswrapper[4634]: I0929 13:45:37.984769 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:37Z","lastTransitionTime":"2025-09-29T13:45:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.086628 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.086679 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.086695 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.086720 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.086737 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.109940 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:38 crc kubenswrapper[4634]: E0929 13:45:38.110126 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.110389 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:38 crc kubenswrapper[4634]: E0929 13:45:38.110492 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.110778 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:38 crc kubenswrapper[4634]: E0929 13:45:38.110877 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.188680 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.188725 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.188736 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.188753 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.188765 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.290672 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.290714 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.290728 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.290746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.290758 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.393144 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.393204 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.393223 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.393254 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.393276 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.495599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.495660 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.495672 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.495709 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.495721 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.597829 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.597871 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.597882 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.597902 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.597913 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.699733 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.699764 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.699772 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.699805 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.699818 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.802738 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.802809 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.802828 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.802856 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.802872 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.905236 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.905275 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.905284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.905300 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:38 crc kubenswrapper[4634]: I0929 13:45:38.905308 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:38Z","lastTransitionTime":"2025-09-29T13:45:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.007899 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.007940 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.007948 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.007962 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.007971 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.109214 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:39 crc kubenswrapper[4634]: E0929 13:45:39.109365 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.110570 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.110595 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.110604 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.110615 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.110624 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.212577 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.212610 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.212618 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.212630 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.212639 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.314741 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.314775 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.314787 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.314803 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.314815 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.417372 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.417398 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.417407 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.417420 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.417431 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.519295 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.519322 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.519333 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.519349 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.519360 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.601671 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:39 crc kubenswrapper[4634]: E0929 13:45:39.601813 4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 13:45:39 crc kubenswrapper[4634]: E0929 13:45:39.601866 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:46:11.60184992 +0000 UTC m=+102.170577669 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.620753 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.620782 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.620792 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.620807 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.620818 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.723332 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.723364 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.723374 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.723391 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.723403 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.826310 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.826354 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.826368 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.826384 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.826396 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.928713 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.928752 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.928763 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.928778 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:39 crc kubenswrapper[4634]: I0929 13:45:39.928789 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:39Z","lastTransitionTime":"2025-09-29T13:45:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.031734 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.031767 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.031776 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.031788 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.031797 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.109677 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.109799 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:45:40 crc kubenswrapper[4634]: E0929 13:45:40.109901 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.109981 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:45:40 crc kubenswrapper[4634]: E0929 13:45:40.110178 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:45:40 crc kubenswrapper[4634]: E0929 13:45:40.110293 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.125523 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.134372 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.134408 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.134417 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.134432 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.134442 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.135913 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.150437 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.170876 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.205592 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee421
85cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.215836 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.227926 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\
\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.236331 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.236526 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.236587 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.236651 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.236711 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.238843 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.249954 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.261379 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.277410 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.296559 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.309217 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.321456 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.333989 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.341042 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.341119 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.341132 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.341149 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.341160 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.349494 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.359887 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.371273 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:40Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.443555 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.443592 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.443601 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.443614 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.443624 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.546409 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.546450 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.546460 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.546476 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.546488 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.648843 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.649188 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.649294 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.649411 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.649497 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.752501 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.752554 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.752570 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.752592 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.752608 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.855480 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.855539 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.855557 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.855581 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.855599 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.958534 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.958588 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.958599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.958616 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:40 crc kubenswrapper[4634]: I0929 13:45:40.958627 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:40Z","lastTransitionTime":"2025-09-29T13:45:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.061464 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.061547 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.061573 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.061606 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.061627 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.110014 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:41 crc kubenswrapper[4634]: E0929 13:45:41.110297 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.164764 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.164844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.164864 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.164900 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.164922 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.266948 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.266980 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.266989 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.267007 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.267016 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.369459 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.369504 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.369516 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.369531 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.369543 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.472215 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.472249 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.472257 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.472275 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.472283 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.509328 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/0.log" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.509374 4634 generic.go:334] "Generic (PLEG): container finished" podID="77b5113e-50cd-417c-8991-cae5cd823f3f" containerID="9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c" exitCode=1 Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.509401 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerDied","Data":"9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.509735 4634 scope.go:117] "RemoveContainer" containerID="9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.543149 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e
3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.560621 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.572270 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.575824 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.575862 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.575870 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.575884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.575895 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.582787 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.601757 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.614551 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 
13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.630104 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.643254 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.655500 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.665465 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.676850 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.677860 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.677886 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.677895 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.677908 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.677919 4634 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.694045 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.704644 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.713050 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.724299 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"2025-09-29T13:44:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883\\\\n2025-09-29T13:44:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883 to /host/opt/cni/bin/\\\\n2025-09-29T13:44:56Z [verbose] multus-daemon started\\\\n2025-09-29T13:44:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T13:45:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.735422 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.755128 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":
\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service 
openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\
\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.763209 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:41Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.779904 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.779942 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.779951 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.779964 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.779974 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.881632 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.881661 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.881670 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.881683 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.881693 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.985345 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.985430 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.985453 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.985510 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:41 crc kubenswrapper[4634]: I0929 13:45:41.985529 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:41Z","lastTransitionTime":"2025-09-29T13:45:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.088933 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.089028 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.089047 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.089071 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.089130 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.109291 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.109364 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:42 crc kubenswrapper[4634]: E0929 13:45:42.109421 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:42 crc kubenswrapper[4634]: E0929 13:45:42.109551 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.109598 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:42 crc kubenswrapper[4634]: E0929 13:45:42.109646 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.191125 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.191166 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.191180 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.191197 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.191210 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.293539 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.293583 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.293592 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.293611 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.293620 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.396007 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.396054 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.396072 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.396130 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.396149 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.499214 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.499294 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.499314 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.499348 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.499369 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.514314 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/0.log" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.514391 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerStarted","Data":"ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.544780 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee421
85cbfef1311232120c39fb66\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.557022 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.575792 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"2025-09-29T13:44:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883\\\\n2025-09-29T13:44:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883 to /host/opt/cni/bin/\\\\n2025-09-29T13:44:56Z [verbose] multus-daemon started\\\\n2025-09-29T13:44:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T13:45:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.595706 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.602357 4634 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.602398 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.602438 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.602459 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.602473 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.617994 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.629522 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.646310 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433
186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.664571 4634 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c8
0d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.676586 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.690443 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.703418 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.708797 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.708836 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.708846 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.708860 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.708873 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.724502 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.738714 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.755840 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.767208 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.777104 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.788520 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.800596 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:42Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.811124 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.811146 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.811156 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.811168 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.811178 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.913464 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.913491 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.913499 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.913511 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:42 crc kubenswrapper[4634]: I0929 13:45:42.913519 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:42Z","lastTransitionTime":"2025-09-29T13:45:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.016136 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.016176 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.016186 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.016200 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.016209 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.110045 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:43 crc kubenswrapper[4634]: E0929 13:45:43.110188 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.119179 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.119224 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.119240 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.119261 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.119278 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.221581 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.221619 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.221628 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.221642 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.221651 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.324102 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.324153 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.324164 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.324178 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.324187 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.426809 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.426851 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.426860 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.426892 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.426906 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.529538 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.529580 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.529591 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.529609 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.529620 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.632058 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.632115 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.632132 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.632152 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.632165 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.734065 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.734129 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.734142 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.734158 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.734170 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.836729 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.836774 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.836786 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.836804 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.836816 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.939642 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.939681 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.939693 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.939707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:43 crc kubenswrapper[4634]: I0929 13:45:43.939717 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:43Z","lastTransitionTime":"2025-09-29T13:45:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.042794 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.043076 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.043377 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.043525 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.043668 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.109951 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.110018 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.110230 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:44 crc kubenswrapper[4634]: E0929 13:45:44.110628 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:44 crc kubenswrapper[4634]: E0929 13:45:44.110778 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:44 crc kubenswrapper[4634]: E0929 13:45:44.110471 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.146890 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.147246 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.148080 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.148328 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.148755 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.252942 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.252999 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.253019 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.253045 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.253064 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.356293 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.356581 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.356704 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.356822 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.356988 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.460074 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.460867 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.461045 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.461209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.461341 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.565118 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.565477 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.565616 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.565738 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.565851 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.669534 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.669605 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.669623 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.669648 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.669679 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.772852 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.772908 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.772930 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.772961 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.772981 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.876044 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.876118 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.876136 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.876158 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.876175 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.979174 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.979541 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.979688 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.979826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:44 crc kubenswrapper[4634]: I0929 13:45:44.979984 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:44Z","lastTransitionTime":"2025-09-29T13:45:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.084309 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.084701 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.084886 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.085158 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.085316 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.109811 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:45 crc kubenswrapper[4634]: E0929 13:45:45.110008 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.188800 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.188859 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.188879 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.188908 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.188929 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.292150 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.292196 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.292215 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.292242 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.292259 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.395347 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.395404 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.395421 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.395445 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.395466 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.499322 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.499386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.499404 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.499429 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.499445 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.602497 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.602560 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.602578 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.602602 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.602619 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.705597 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.705651 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.705669 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.705693 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.705710 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.808966 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.809010 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.809021 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.809038 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.809049 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.911902 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.911937 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.911947 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.911964 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:45 crc kubenswrapper[4634]: I0929 13:45:45.911977 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:45Z","lastTransitionTime":"2025-09-29T13:45:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.014567 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.014621 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.014644 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.014670 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.014684 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.109220 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:46 crc kubenswrapper[4634]: E0929 13:45:46.109382 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.109661 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:46 crc kubenswrapper[4634]: E0929 13:45:46.109876 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.110183 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:46 crc kubenswrapper[4634]: E0929 13:45:46.110294 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.147705 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.147757 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.147770 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.147789 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.147802 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.250958 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.251015 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.251039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.251069 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.251132 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.354152 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.354209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.354226 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.354257 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.354275 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.457319 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.457356 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.457365 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.457379 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.457389 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.560864 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.560950 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.560975 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.561006 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.561027 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.664799 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.664907 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.664948 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.664991 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.665021 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.768344 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.768410 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.768423 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.768447 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.768462 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.872339 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.872440 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.872469 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.872504 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.872524 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.974972 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.975039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.975058 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.975124 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:46 crc kubenswrapper[4634]: I0929 13:45:46.975140 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:46Z","lastTransitionTime":"2025-09-29T13:45:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.079736 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.079819 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.079844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.079883 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.079911 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.088970 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.089018 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.089036 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.089058 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.089075 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.109851 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:47 crc kubenswrapper[4634]: E0929 13:45:47.110396 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
Sep 29 13:45:47 crc kubenswrapper[4634]: E0929 13:45:47.112039 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:47Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.118298 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.118372 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.118394 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.118425 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.118444 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: E0929 13:45:47.141424 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:47Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.150826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.150874 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.150887 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.150915 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.150929 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: E0929 13:45:47.169024 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:47Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.179399 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.179469 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
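
Every status-update attempt in this stretch fails for the same reason: before the kubelet's patch can land, the API server must call the node.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and that webhook serves a TLS certificate that expired on 2025-08-24T17:21:41Z while the node clock reads 2025-09-29. The retries that follow carry an identical payload and fail identically until the kubelet gives up with "update node status exceeds retry count" below. A minimal Go sketch, not part of the log, to confirm the served certificate's validity window from the node (the address is taken from the Post URL in the error):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Verification is exactly what fails in the log, so skip it here;
	// the goal is only to read the certificate's dates.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial 127.0.0.1:9743: %v", err)
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s\n",
			cert.Subject.String(),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
	}
}

If notAfter is in the past, as the error message indicates, the webhook's serving certificate needs to be rotated; on a CRC cluster that has been offline past its certificate lifetime, restarting the cluster and letting certificate rotation complete is typically the remedy.
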
event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.179487 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.179513 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.179533 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: E0929 13:45:47.200411 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:47Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.205385 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.205442 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.205505 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.205551 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.205579 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: E0929 13:45:47.224951 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:47Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:47 crc kubenswrapper[4634]: E0929 13:45:47.225197 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.228354 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.228462 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.228491 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.228523 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.228549 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.332754 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.332911 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.332931 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.332960 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.333017 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.435555 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.435620 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.435637 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.435665 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.435683 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.538785 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.538837 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.538851 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.538871 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.538886 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.641657 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.641749 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.641761 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.641779 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.641790 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.745022 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.745064 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.745102 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.745123 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.745136 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.847877 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.847953 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.847977 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.848004 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.848024 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.951774 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.951870 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.951889 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.951914 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:47 crc kubenswrapper[4634]: I0929 13:45:47.951932 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:47Z","lastTransitionTime":"2025-09-29T13:45:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.055175 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.055243 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.055258 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.055284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.055300 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.109750 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.109743 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:48 crc kubenswrapper[4634]: E0929 13:45:48.109935 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.109778 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:48 crc kubenswrapper[4634]: E0929 13:45:48.110072 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:48 crc kubenswrapper[4634]: E0929 13:45:48.110458 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.158808 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.158869 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.158887 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.158912 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.158930 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.262340 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.262724 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.262895 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.263035 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.263061 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.366803 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.366848 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.366860 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.366879 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.366893 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.470555 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.470624 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.470642 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.470672 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.470690 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.574282 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.574344 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.574362 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.574388 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.574406 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.676663 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.676724 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.676741 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.676765 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.676808 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.780166 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.780232 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.780251 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.780276 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.780294 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.882579 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.883075 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.883361 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.883587 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.883804 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.985901 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.985939 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.985971 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.985987 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:48 crc kubenswrapper[4634]: I0929 13:45:48.985997 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:48Z","lastTransitionTime":"2025-09-29T13:45:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.089237 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.089307 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.089328 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.089356 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.089379 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.109876 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:49 crc kubenswrapper[4634]: E0929 13:45:49.110874 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.111177 4634 scope.go:117] "RemoveContainer" containerID="ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.192713 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.192792 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.192814 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.192844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.192889 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.295253 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.295299 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.295314 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.295333 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.295346 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.399241 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.399305 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.399323 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.399349 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.399367 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.504373 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.504423 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.504439 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.504463 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.504481 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.542846 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/2.log" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.547205 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.548015 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.590347 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.607652 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.607712 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.607731 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.607755 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.607773 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.618898 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.642800 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.661904 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.697479 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-co
py\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.710293 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.710347 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.710360 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.710380 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.710404 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.716397 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd
78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.731948 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.748415 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.761472 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.773039 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.791586 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,
\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.805588 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.813111 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.813157 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.813167 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.813185 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.813196 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.818919 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.834436 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.851928 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"2025-09-29T13:44:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883\\\\n2025-09-29T13:44:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883 to /host/opt/cni/bin/\\\\n2025-09-29T13:44:56Z [verbose] multus-daemon started\\\\n2025-09-29T13:44:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T13:45:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.867440 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.889158 4634 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built 
ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.901349 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:49Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.915611 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.915678 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.915690 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.915704 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:49 crc kubenswrapper[4634]: I0929 13:45:49.915712 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:49Z","lastTransitionTime":"2025-09-29T13:45:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.019986 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.020050 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.020066 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.020143 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.020162 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.110041 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:50 crc kubenswrapper[4634]: E0929 13:45:50.110243 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.110277 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.110430 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:50 crc kubenswrapper[4634]: E0929 13:45:50.110455 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:50 crc kubenswrapper[4634]: E0929 13:45:50.110769 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.122202 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.122255 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.122269 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.122288 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.122302 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.127936 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"q
uay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.143891 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.155002 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.167761 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.187241 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"2025-09-29T13:44:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883\\\\n2025-09-29T13:44:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883 to /host/opt/cni/bin/\\\\n2025-09-29T13:44:56Z [verbose] multus-daemon started\\\\n2025-09-29T13:44:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T13:45:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.211381 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.224562 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.224616 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.224631 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.224656 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.224671 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.237753 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c
222fbec72f5a9bc395293217\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built 
ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.250550 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.269607 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.297795 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.314096 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.325873 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.326520 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.326590 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.326601 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.326618 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.326629 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.336940 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.349217 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.359114 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 
13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.369071 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.379774 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.393888 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.429780 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.429822 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.429840 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.429874 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.429890 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.532857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.532908 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.532917 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.532931 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.532943 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.551719 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/3.log" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.552432 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/2.log" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.556506 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" exitCode=1 Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.556540 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.556592 4634 scope.go:117] "RemoveContainer" containerID="ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.559495 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:45:50 crc kubenswrapper[4634]: E0929 13:45:50.559798 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.580829 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.599173 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.612128 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.624974 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.635639 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.635677 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.635691 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.635713 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.635729 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.640813 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"2025-09-29T13:44:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883\\\\n2025-09-29T13:44:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883 to /host/opt/cni/bin/\\\\n2025-09-29T13:44:56Z [verbose] multus-daemon started\\\\n2025-09-29T13:44:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T13:45:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.651549 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.668620 4634 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec1c49e0b17c2262ad449f4e1c08d0f4b1fee42185cbfef1311232120c39fb66\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:18Z\\\",\\\"message\\\":\\\"Endpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0929 13:45:18.708558 6205 model_client.go:382] Update operations generated as: [{Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.4 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {43933d5e-3c3b-4ff8-8926-04ac25de450e}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 13:45:18.708572 6205 services_controller.go:444] Built service openshift-console/downloads LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708583 6205 services_controller.go:445] Built service openshift-console/downloads LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708586 6205 services_controller.go:444] Built service openshift-machine-api/machine-api-controllers LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708600 6205 services_controller.go:445] Built service openshift-machine-api/machine-api-controllers LB template configs for network=default: []services.lbConfig(nil)\\\\nI0929 13:45:18.708609 6205 services_controller.go:451] Built ser\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:50Z\\\",\\\"message\\\":\\\"j_retry.go:365] Adding new 
object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 13:45:50.092930 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0929 13:45:50.092774 6557 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster\\\\\\\", UUID:\\\\\\\"a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{S\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\
\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.680705 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.693510 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.721574 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.735330 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.738729 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.738759 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.738770 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.738785 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.738797 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.750468 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.762307 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.774930 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.786563 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 
13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.800046 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.813905 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.827074 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:50Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.841042 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.841070 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.841104 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.841120 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.841131 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.943554    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.943597    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.943609    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.943630    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:50 crc kubenswrapper[4634]: I0929 13:45:50.943642    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:50Z","lastTransitionTime":"2025-09-29T13:45:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.046728    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.046786    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.046805    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.046830    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.046847    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.109514    4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:51 crc kubenswrapper[4634]: E0929 13:45:51.109766    4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.149115    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.149269    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.149296    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.149325    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.149346    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.252463    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.252506    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.252516    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.252531    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.252543    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.354854    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.354881    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.354889    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.354901    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.354909    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.456655    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.456685    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.456692    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.456707    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.456717    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.559104    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.559141    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.559152    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.559167    4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.559178    4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.561775 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/3.log" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.566116 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:45:51 crc kubenswrapper[4634]: E0929 13:45:51.566418 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.577262 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identi
ty-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.589797 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-override
s\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.601157 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-d
ir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.615912 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.629831 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.642626 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 
13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.657166 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.660875 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.660927 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.660940 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.660969 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.660983 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.675248 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.686884 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.702888 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"2025-09-29T13:44:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883\\\\n2025-09-29T13:44:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883 to /host/opt/cni/bin/\\\\n2025-09-29T13:44:56Z [verbose] multus-daemon started\\\\n2025-09-29T13:44:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T13:45:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.714894 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.732553 4634 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:50Z\\\",\\\"message\\\":\\\"j_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 13:45:50.092930 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0929 13:45:50.092774 6557 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster\\\\\\\", UUID:\\\\\\\"a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{S\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting 
failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.742021 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qvsct" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.757596 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\
",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"
imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\
":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.763628 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.763700 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.763732 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.763752 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.763773 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.777822 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.790402 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.802426 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.812365 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:51Z is after 2025-08-24T17:21:41Z"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.866454 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.866503 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.866512 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.866529 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.866539 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.968739 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.968809 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.968835 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.968864 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:51 crc kubenswrapper[4634]: I0929 13:45:51.968885 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:51Z","lastTransitionTime":"2025-09-29T13:45:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.071836 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.071874 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.071911 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.071926 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.071937 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.109628 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:45:52 crc kubenswrapper[4634]: E0929 13:45:52.109910 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.109729 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.109633 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:45:52 crc kubenswrapper[4634]: E0929 13:45:52.109994 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:45:52 crc kubenswrapper[4634]: E0929 13:45:52.110171 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.173996 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.174056 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.174067 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.174097 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.174114 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.276681 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.276807 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.276837 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.276884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.276906 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.379936 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.379982 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.379999 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.380021 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.380036 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.483022 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.483158 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.483183 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.483207 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.483226 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.585404 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.585445 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.585453 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.585468 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.585476 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.687846 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.687916 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.687936 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.687955 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.687969 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.791573 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.791641 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.791657 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.791679 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.791694 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.894500 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.894558 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.894567 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.894580 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.894590 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.996950 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.996996 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.997008 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.997026 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:52 crc kubenswrapper[4634]: I0929 13:45:52.997035 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:52Z","lastTransitionTime":"2025-09-29T13:45:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.099705 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.099746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.099757 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.099775 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.099786 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.110216 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.110395 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.203112 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.203154 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.203162 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.203176 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.203188 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.305748 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.305826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.305845 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.305869 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.305890 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.409303 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.409364 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.409381 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.409408 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.409426 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.512321 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.512375 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.512393 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.512416 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.512432 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.614722 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.614790 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.614809 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.614834 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.614853 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.716726 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.716786 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.716804 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.716826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.716842 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.819848 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.819901 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.819919 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.819944 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.819961 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.873174 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.873379 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873421 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.873382988 +0000 UTC m=+148.442110797 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.873481 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.873579 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873692 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873737 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873743 4634 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873758 4634 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873769 4634 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873841 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.873817949 +0000 UTC m=+148.442545748 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873890 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.873859241 +0000 UTC m=+148.442587030 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.873927 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.873909542 +0000 UTC m=+148.442637401 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.922284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.922345 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.922367 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.922395 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.922416 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:53Z","lastTransitionTime":"2025-09-29T13:45:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:53 crc kubenswrapper[4634]: I0929 13:45:53.974444 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.974667 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.974695 4634 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.974713 4634 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:45:53 crc kubenswrapper[4634]: E0929 13:45:53.974790 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.974767996 +0000 UTC m=+148.543495775 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.025321 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.025386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.025409 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.025438 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.025461 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.110230 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.110262 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:45:54 crc kubenswrapper[4634]: E0929 13:45:54.110407 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.110262 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:45:54 crc kubenswrapper[4634]: E0929 13:45:54.110574 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:45:54 crc kubenswrapper[4634]: E0929 13:45:54.110814 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.128452 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.128561 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.128715 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.128748 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.128773 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.231301 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.231342 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.231352 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.231368 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.231380 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.334108 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.334152 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.334166 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.334184 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.334198 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.437621 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.437684 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.437702 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.437727 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.437746 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.539989 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.540029 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.540042 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.540056 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.540066 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.642873 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.642921 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.642931 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.642949 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.642962 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.745325 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.745428 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.745478 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.745500 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.745516 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.849911 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.850021 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.850046 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.850133 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.850170 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.953806 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.953859 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.953881 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.953909 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:54 crc kubenswrapper[4634]: I0929 13:45:54.953931 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:54Z","lastTransitionTime":"2025-09-29T13:45:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.056387 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.056429 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.056449 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.056468 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.056478 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.109152 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:45:55 crc kubenswrapper[4634]: E0929 13:45:55.109338 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.119067 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.158966 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.159001 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.159010 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.159025 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.159033 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.261667 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.261732 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.261753 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.261779 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.261796 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.364606 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.364665 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.364673 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.364686 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.364694 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.467199 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.467235 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.467244 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.467257 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.467266 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.570878 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.570928 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.570966 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.570986 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.571001 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.674835 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.674919 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.674943 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.675395 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.675620 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.778972 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.779045 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.779069 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.779141 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.779167 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.882126 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.882186 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.882207 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.882234 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.882254 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.985657 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.985746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.985763 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.985785 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:55 crc kubenswrapper[4634]: I0929 13:45:55.985803 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:55Z","lastTransitionTime":"2025-09-29T13:45:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.088850 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.088890 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.088902 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.088921 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.088934 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.109413 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.109540 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:45:56 crc kubenswrapper[4634]: E0929 13:45:56.109682 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:56 crc kubenswrapper[4634]: E0929 13:45:56.109856 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.109942 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:56 crc kubenswrapper[4634]: E0929 13:45:56.110037 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.218557 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.218784 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.218801 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.218825 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.218843 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.321070 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.321111 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.321121 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.321134 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.321143 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.424596 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.424642 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.424657 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.424675 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.424691 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.526733 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.526773 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.526783 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.526798 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.526808 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.628692 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.629007 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.629119 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.629194 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.629263 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.731468 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.731815 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.732038 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.732323 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.732545 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.835414 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.835645 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.835733 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.835825 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.835931 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.938707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.939238 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.939430 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.939603 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:56 crc kubenswrapper[4634]: I0929 13:45:56.939737 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:56Z","lastTransitionTime":"2025-09-29T13:45:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.041937 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.041990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.042005 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.042026 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.042043 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.110101 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:57 crc kubenswrapper[4634]: E0929 13:45:57.110433 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.144722 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.144757 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.144770 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.144789 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.144804 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.247033 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.247278 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.247288 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.247302 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.247312 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.341837 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.341886 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.341903 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.341930 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.341948 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: E0929 13:45:57.360044 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.363262 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.363297 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.363307 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.363321 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.363333 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: E0929 13:45:57.380785 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.384030 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.384062 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.384070 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.384097 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.384106 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: E0929 13:45:57.395334 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.399144 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.399299 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.399415 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.399538 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.399645 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: E0929 13:45:57.412334 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.415997 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.416170 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.416278 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.416411 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.416533 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: E0929 13:45:57.432736 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:45:57Z is after 2025-08-24T17:21:41Z" Sep 29 13:45:57 crc kubenswrapper[4634]: E0929 13:45:57.432854 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.434223 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.434270 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.434281 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.434296 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.434309 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.537811 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.538177 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.538288 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.538377 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.538471 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.642667 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.642729 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.642747 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.642775 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.642792 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.745718 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.745800 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.745819 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.745845 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.745861 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.849331 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.849391 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.849407 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.849429 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.849449 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.951781 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.951870 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.951888 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.951912 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:57 crc kubenswrapper[4634]: I0929 13:45:57.951929 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:57Z","lastTransitionTime":"2025-09-29T13:45:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.054524 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.054604 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.054622 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.054646 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.054664 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.110299 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.110379 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.110336 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:45:58 crc kubenswrapper[4634]: E0929 13:45:58.110548 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:45:58 crc kubenswrapper[4634]: E0929 13:45:58.110727 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:45:58 crc kubenswrapper[4634]: E0929 13:45:58.110906 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.157521 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.157583 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.157601 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.157625 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.157643 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.260383 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.260453 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.260473 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.260503 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.260528 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.363240 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.363299 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.363312 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.363330 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.363343 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.465571 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.465609 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.465623 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.465644 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.465657 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.568113 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.568223 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.568235 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.568249 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.568258 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.670919 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.670992 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.671016 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.671042 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.671060 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.774850 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.775246 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.775401 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.775564 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.775700 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.878982 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.879048 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.879070 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.879136 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.879159 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.982939 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.983973 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.984186 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.984342 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:58 crc kubenswrapper[4634]: I0929 13:45:58.984501 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:58Z","lastTransitionTime":"2025-09-29T13:45:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.088602 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.088975 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.089228 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.089509 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.089744 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.110283 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:45:59 crc kubenswrapper[4634]: E0929 13:45:59.110544 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.193625 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.193682 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.193769 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.193795 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.193812 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.297514 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.297591 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.297615 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.297645 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.297670 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.400159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.400203 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.400217 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.400235 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.400253 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.503346 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.503673 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.503839 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.504048 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.504234 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.606988 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.607029 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.607039 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.607056 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.607067 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.709321 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.709353 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.709362 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.709375 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.709384 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.812837 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.812880 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.812892 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.812907 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.812952 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.915177 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.915230 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.915244 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.915264 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:45:59 crc kubenswrapper[4634]: I0929 13:45:59.915281 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:45:59Z","lastTransitionTime":"2025-09-29T13:45:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.018355 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.019159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.019342 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.019484 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.019617 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.110303 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.110805 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.110879 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:00 crc kubenswrapper[4634]: E0929 13:46:00.111524 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:00 crc kubenswrapper[4634]: E0929 13:46:00.111942 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:00 crc kubenswrapper[4634]: E0929 13:46:00.112261 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
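[editor's note: every NodeNotReady entry above traces to a single cause: the runtime finds no CNI configuration file in /etc/kubernetes/cni/net.d/. A minimal Go sketch of that kind of discovery check follows, assuming the common convention that any .conf, .conflist, or .json file in the directory counts as a network configuration; this is an illustration, not the actual kubelet or CRI-O code.]

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // cniConfigPresent reports whether dir contains at least one file with a
    // CNI-style extension. Runtimes following the CNI convention treat an
    // empty (or missing) conf directory as "network plugin not ready".
    func cniConfigPresent(dir string) (bool, error) {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return false, err // a missing directory also means "not ready"
    	}
    	for _, e := range entries {
    		if e.IsDir() {
    			continue
    		}
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
    	fmt.Println(ok, err)
    }

[once the network provider (here, OVN-Kubernetes) writes its config into the directory, this check flips to true and the kubelet clears the KubeletNotReady condition.]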
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.124690 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.124757 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.124784 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.124813 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.124834 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.138738 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d11f66313a1bb13b83c535b71201a5b9153e47531de275da94dc2b77724f50\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.158456 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9490377fd2c79de7322bbdee7f4eaa085f984253c34b68a9ef5375795789416\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.177460 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85c1b26c-a922-4d3c-934f-e6968735a76e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-92zjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:07Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nl5xm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.204507 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"155e7c30-4de9-4bcf-a403-bede451d17aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e70e9b8578e42c1f746c3460ca9bcbb507ec6e1c0737e139256f8a7729bf4bb7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb75ab556e9e57788579926f036357b856390b32ac52c0e7db7e3c87156ad90\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8b171d0de0092f3f39d45def67145443dfafadb2f3165f0b0c82ea31d779665\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ed6a67847142b64468454dec0215e12bac4cead4841b68b8159fbabd348171a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0299a5a6c0d7899fabc596233836ceecdf4a4fb428a9b3091b077b696808387c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 13:44:49.886796 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 13:44:49.887490 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 13:44:49.888829 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2370707441/tls.crt::/tmp/serving-cert-2370707441/tls.key\\\\\\\"\\\\nI0929 13:44:50.158099 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 13:44:50.160731 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 13:44:50.160749 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 13:44:50.160771 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 13:44:50.160775 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 13:44:50.166913 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 13:44:50.166937 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nI0929 13:44:50.166977 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0929 13:44:50.167299 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 13:44:50.167316 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 13:44:50.167322 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 13:44:50.167326 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 13:44:50.167329 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0929 13:44:50.168400 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b5614fb31031cbfae85141ea9336dd31a6f1dd7a077cb3f310a8403c1ff0d6d1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e372ff21eab6a2dee2c7390f9ed37cf50dec980ed1368b96b0763fd2174e290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.227190 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.227248 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.227311 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.227345 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.227409 4634 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.232347 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9173d45a-da12-4090-92c3-65ad4dcec715\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c975b481c5b813651e63491b9f4fe44ea2d9f8d5505788d5082989cee36233cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6n72r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-k9jf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.272712 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65f06677-4cbf-41c9-a0da-02f49710c11c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"ru
n-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:50Z\\\",\\\"message\\\":\\\"j_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0929 13:45:50.092930 6557 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0929 13:45:50.092774 6557 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster\\\\\\\", UUID:\\\\\\\"a36f6289-d09f-43f8-8a8a-c9d2cc11eb0d\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-daemon_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", 
ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-daemon\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{S\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-o
penvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k24j2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-jqlh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z"
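[editor's note: every one of these status-patch failures shares a root cause: the webhook's serving certificate is outside its validity window (current time 2025-09-29T13:46:00Z is after the 2025-08-24T17:21:41Z expiry). The "x509: certificate has expired or is not yet valid" string comes from Go's standard x509 verifier; a small illustrative sketch of the same check against a PEM-encoded certificate follows. The tls.crt path is hypothetical.]

    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"os"
    	"time"
    )

    // checkValidityWindow parses a PEM certificate and reports whether now
    // falls inside its NotBefore/NotAfter bounds -- the same comparison that
    // yields "x509: certificate has expired or is not yet valid".
    func checkValidityWindow(pemBytes []byte, now time.Time) error {
    	block, _ := pem.Decode(pemBytes)
    	if block == nil {
    		return fmt.Errorf("no PEM block found")
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		return err
    	}
    	if now.Before(cert.NotBefore) {
    		return fmt.Errorf("certificate not yet valid until %s", cert.NotBefore)
    	}
    	if now.After(cert.NotAfter) {
    		return fmt.Errorf("certificate expired at %s (now %s)", cert.NotAfter, now)
    	}
    	return nil
    }

    func main() {
    	pemBytes, err := os.ReadFile("tls.crt") // path is illustrative
    	if err != nil {
    		fmt.Println(err)
    		return
    	}
    	fmt.Println(checkValidityWindow(pemBytes, time.Now()))
    }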
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c79a239-2e61-4c28-8d03-1f8cebce6190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://12c768519a4ab1b8fe2a9b691be4c8d30ee8f5ae7bb5da5358b60f261417aa6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9cnjr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qvsct\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.307628 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wtnjd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77b5113e-50cd-417c-8991-cae5cd823f3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T13:45:41Z\\\",\\\"message\\\":\\\"2025-09-29T13:44:55+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883\\\\n2025-09-29T13:44:55+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8591fd9e-3ee2-4efb-a5fa-c65100cda883 to /host/opt/cni/bin/\\\\n2025-09-29T13:44:56Z [verbose] multus-daemon started\\\\n2025-09-29T13:44:56Z [verbose] Readiness Indicator file check\\\\n2025-09-29T13:45:41Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-svzgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wtnjd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z"
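[editor's note: the kube-multus restart recorded above comes from a readiness-indicator timeout: the daemon polled for /host/run/multus/cni/net.d/10-ovn-kubernetes.conf until it gave up ("timed out waiting for the condition" is the standard error string of the Kubernetes wait/poll helpers). A plain-stdlib Go sketch of that polling pattern follows; the interval and timeout values are illustrative, not multus's actual settings.]

    package main

    import (
    	"fmt"
    	"os"
    	"time"
    )

    // waitForFile polls for path every interval until timeout, mirroring the
    // readiness-indicator check multus performs before declaring itself ready.
    func waitForFile(path string, interval, timeout time.Duration) error {
    	deadline := time.Now().Add(timeout)
    	for {
    		if _, err := os.Stat(path); err == nil {
    			return nil // indicator file exists: default network is ready
    		}
    		if time.Now().After(deadline) {
    			return fmt.Errorf("timed out waiting for %s", path)
    		}
    		time.Sleep(interval)
    	}
    }

    func main() {
    	err := waitForFile("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
    		time.Second, 45*time.Second) // values are illustrative
    	fmt.Println(err)
    }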
Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.322489 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z"
Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.330227 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.330271 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.330284 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.330301 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.330314 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
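[editor's note: the "Node became not ready" entries carry the full Ready condition as JSON. A Go sketch that decodes one of these payloads with encoding/json follows, using a struct that mirrors only the fields visible in this log rather than the canonical k8s.io/api types.]

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    // nodeCondition mirrors the fields printed by the kubelet's
    // "Node became not ready" log entry.
    type nodeCondition struct {
    	Type               string `json:"type"`
    	Status             string `json:"status"`
    	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
    	LastTransitionTime string `json:"lastTransitionTime"`
    	Reason             string `json:"reason"`
    	Message            string `json:"message"`
    }

    func main() {
    	// Condition payload abbreviated from the entry above.
    	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
    	var c nodeCondition
    	if err := json.Unmarshal([]byte(raw), &c); err != nil {
    		fmt.Println("decode error:", err)
    		return
    	}
    	fmt.Printf("node Ready=%s reason=%s: %s\n", c.Status, c.Reason, c.Message)
    }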
Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.337529 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.351444 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-sxkt4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3c08520a-99c4-43f8-8cd3-6547f22c571b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d03879db4ad5b04fe13cdd62ff7a419ac6c3ef37da76f2d8133989cc3848c517\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lkwzm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-sxkt4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.373948 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-b59nb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fd1b6a92-008b-40ed-bbbb-15270d2f599a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dedaf0164e1fc0c3fb680f4ca47d504f8edec0076e1bd4cb26389dc556834e9b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://54c5e2cc671a646a4cdb7134306449bdc194a62db15d44daef474c5d3ff83463\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bfe568c2c87017c6432938afcd59b8a487c50385943d87f433186563185ead18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d749bbf5191fe7619594cd340ad009d9616c6e58dc9c221584283fa486792ba\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://506d6322ba22cc6a2bdb68376434631a264c69b165c7f94369de9b60d9faeae1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1837aa8bfc75cf1c72621ccb46ed3c3b8d9fc397de88f8437c9c6a343858def0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d3fca1957e1ed9e195adf3a73d583b23cd97945049371cc901fbee134c601d41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:45:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:45:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-64gtc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-b59nb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.405914 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b97bca5f-5888-463a-bcd1-62ebd8484bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f895f3ec31a6ea5a950c2ec286a810559bf202b2ce58385ef45dce3d88e8275\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fae967de05b852186617002b02801dcac0dd0fbed0041e2a793791131837e062\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a93d4cab582c9d1e904f511ac17e611ca15fe1f3e2d4b0b1e45cdab514ff134\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a820698665373fe2d2bc90c8dacf7318791ce96
bfc9843c5d9a56e4ed89f69ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d77b664b57c4b97ec3eea44f81ef0258435f4677a192a9781bdd9295a1963c09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842b43794eef40508e04bf8bc2a3612b28f7d90b9e3e01ca36d1d6ceeeee5b64\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2975357daca5bb5e1a872a0346876f80945908751f65f36b7aa3de318ac9214f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de6c80d640e15d8531d2b73a1c514f1f715b72846806bfa691311946193ea44c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.425362 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7bde8fbb-875a-4869-97a1-41ea4728e7ec\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b56ce67df981d54d91da25a29112a696795943af625a5785a1eeae940862eba2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://91f9d5b4fb83d94d2b6b6e2c85e3263942a0e32b64e664197f12f4f64c6eeadc\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f977a86afddf1c8b4e38b8a4e2c0a596ef84dcc7905eb37c2d140b4726b2f853\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c246cfee8dddb22f19198f6ed9eb0772b89a612113439b3cb9468e27db6455cf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.432831 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.432857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.432869 4634 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.432884 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.432895 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.442800 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8677ed4d-eac7-4d05-b756-c512b13072b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e67ad0abc7b3556e2564905808247dbc4049bc99621abc89d120f641b6b351aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8254d5dd4ee442314c5186a6bdd483ded7d14b9426cbbd0e8a659b9a0aad77eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8254d5dd4ee442314c5186a6bdd483ded7d14b9426cbbd0e8a659b9a0aad77eb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.461955 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:50Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.476583 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:51Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6b39dc8cbee0d8cf5419a0868e0fcb13c5fb6ae78003ff6f7bca5310ac1a178e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://02d08f36ad8113caa6b436bb88cfcd73f157d648b1dbeeb80dfafac52b13d145\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.491052 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2a9def76-5f53-4d1b-bf94-cb2fb8a89ea6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e399c110365af750a3d4a8856587221cf8a3b579a08949b96794ff444266204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12b8decf4339f9cc612e6cae419d64225d3223a7de35178f385e9127c5b7425d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:45:06Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rbkjw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:45:06Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-vgbtn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.509461 4634 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6cc72ff4-71f0-41e4-a866-45a3007f9074\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:45:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T13:44:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://035a88a713fd66098876b6c50685c6e20b8e8209fbe483d3d45c98764d5a519c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02fd5b0f0c275a436052110cb8053d91159b6c44af7584381dfbe8bad1cd78ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e7218f37443df750e12fe3355de64bba3367b97cd47685b9e2c4ac99df876e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T13:44:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78584d1d5e20c6a4098b8a74fac9160da21e907c754ac99b5e6c3dfced2127da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T13:44:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T13:44:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T13:44:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:00Z is after 2025-08-24T17:21:41Z" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.534938 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.534981 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.534995 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.535014 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.535029 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.637279 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.637319 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.637336 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.637358 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.637374 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.740480 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.741033 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.741227 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.741374 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.741529 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.844802 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.844848 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.844867 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.844891 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.844910 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.947623 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.947677 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.947693 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.947717 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:00 crc kubenswrapper[4634]: I0929 13:46:00.947735 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:00Z","lastTransitionTime":"2025-09-29T13:46:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.051339 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.051403 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.051429 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.051459 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.051481 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:01Z","lastTransitionTime":"2025-09-29T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.109406 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:01 crc kubenswrapper[4634]: E0929 13:46:01.109552 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.154328 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.154396 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.154415 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.154443 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.154461 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:01Z","lastTransitionTime":"2025-09-29T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.258023 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.258064 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.258075 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.258113 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:01 crc kubenswrapper[4634]: I0929 13:46:01.258128 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:01Z","lastTransitionTime":"2025-09-29T13:46:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[same five-entry node-status heartbeat block at 13:46:01.361, 13:46:01.466, 13:46:01.569, 13:46:01.673, 13:46:01.775 and 13:46:01.878]
[same five-entry node-status heartbeat block at 13:46:01.981 and 13:46:02.085]
Sep 29 13:46:02 crc kubenswrapper[4634]: I0929 13:46:02.110422 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:46:02 crc kubenswrapper[4634]: E0929 13:46:02.110582 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 13:46:02 crc kubenswrapper[4634]: I0929 13:46:02.110755 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:46:02 crc kubenswrapper[4634]: I0929 13:46:02.110868 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:46:02 crc kubenswrapper[4634]: E0929 13:46:02.110995 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:46:02 crc kubenswrapper[4634]: E0929 13:46:02.111265 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[same five-entry node-status heartbeat block at 13:46:02.189 and 13:46:02.291]
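Every sync error above traces back to the same fact: the kubelet's network plugin manager found no CNI configuration in /etc/kubernetes/cni/net.d/. On an OpenShift/CRC node that file is written by the cluster network operator (Multus/OVN-Kubernetes) once it comes up, so these entries clear on their own; nothing is normally placed there by hand. For reference only, the directory is expected to hold at least one *.conf or *.conflist file of the following generic shape — a minimal illustrative example using the standard bridge and host-local plugins, not what OpenShift actually writes:

	{
	  "cniVersion": "0.4.0",
	  "name": "example-net",
	  "plugins": [
	    {
	      "type": "bridge",
	      "bridge": "cni0",
	      "isGateway": true,
	      "ipMasq": true,
	      "ipam": {
	        "type": "host-local",
	        "subnet": "10.88.0.0/16"
	      }
	    }
	  ]
	}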
[same five-entry node-status heartbeat block at 13:46:02.394, 13:46:02.497, 13:46:02.600, 13:46:02.702, 13:46:02.805 and 13:46:02.909]
[same five-entry node-status heartbeat block at 13:46:03.012]
Sep 29 13:46:03 crc kubenswrapper[4634]: I0929 13:46:03.109402 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:46:03 crc kubenswrapper[4634]: E0929 13:46:03.110346 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
[same five-entry node-status heartbeat block at 13:46:03.115]
[same five-entry node-status heartbeat block at 13:46:03.218, 13:46:03.322, 13:46:03.426, 13:46:03.529, 13:46:03.632, 13:46:03.736, 13:46:03.840, 13:46:03.942 and 13:46:04.046]
Sep 29 13:46:04 crc kubenswrapper[4634]: I0929 13:46:04.109965 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:46:04 crc kubenswrapper[4634]: I0929 13:46:04.110070 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:46:04 crc kubenswrapper[4634]: E0929 13:46:04.110193 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 13:46:04 crc kubenswrapper[4634]: I0929 13:46:04.110238 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:46:04 crc kubenswrapper[4634]: E0929 13:46:04.110404 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:46:04 crc kubenswrapper[4634]: E0929 13:46:04.110417 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[same five-entry node-status heartbeat block at 13:46:04.149]
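The NetworkReady=false text the kubelet keeps quoting is the container runtime's own status condition, which the kubelet polls over the CRI socket; it can be queried the same way (crictl info reports the same data). A hedged Go sketch using the published CRI API — the CRI-O socket path is an assumption about this node, not something stated in the log:

	// cristatus.go: query RuntimeReady/NetworkReady over the CRI socket.
	// Illustrative only; /var/run/crio/crio.sock is an assumed path.
	package main

	import (
		"context"
		"fmt"
		"time"

		"google.golang.org/grpc"
		"google.golang.org/grpc/credentials/insecure"
		runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
	)

	func main() {
		conn, err := grpc.Dial("unix:///var/run/crio/crio.sock",
			grpc.WithTransportCredentials(insecure.NewCredentials()))
		if err != nil {
			panic(err)
		}
		defer conn.Close()

		ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
		defer cancel()

		resp, err := runtimeapi.NewRuntimeServiceClient(conn).Status(ctx, &runtimeapi.StatusRequest{})
		if err != nil {
			panic(err)
		}
		for _, c := range resp.GetStatus().GetConditions() {
			// While net.d is empty, expect RuntimeReady=true and
			// NetworkReady=false with reason NetworkPluginNotReady.
			fmt.Printf("%s=%t reason=%q message=%q\n", c.Type, c.Status, c.Reason, c.Message)
		}
	}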
[same five-entry node-status heartbeat block at 13:46:04.251, 13:46:04.354, 13:46:04.457, 13:46:04.560, 13:46:04.662, 13:46:04.765, 13:46:04.868, 13:46:04.971 and 13:46:05.073]
Sep 29 13:46:05 crc kubenswrapper[4634]: I0929 13:46:05.110199 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:46:05 crc kubenswrapper[4634]: E0929 13:46:05.110349 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
[same five-entry node-status heartbeat block at 13:46:05.176 and 13:46:05.279]
[same five-entry node-status heartbeat block at 13:46:05.381, 13:46:05.484, 13:46:05.587, 13:46:05.691, 13:46:05.795 and 13:46:05.899]
Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.002253 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.002320 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.002340 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.002367 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.002386 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.105665 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.106021 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.106291 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.106547 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.106753 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.110251 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.110404 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:06 crc kubenswrapper[4634]: E0929 13:46:06.110414 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.110777 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:06 crc kubenswrapper[4634]: E0929 13:46:06.112133 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:06 crc kubenswrapper[4634]: E0929 13:46:06.111985 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.209826 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.209931 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.209959 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.209992 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.210019 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.312746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.313031 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.313152 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.313324 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.313409 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.417080 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.417496 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.417746 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.417951 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.418169 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.521296 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.521339 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.521356 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.521382 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.521425 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.623704 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.623750 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.623763 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.623782 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.623797 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.726285 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.726343 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.726361 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.726387 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.726404 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.829606 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.829651 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.829663 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.829681 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.829691 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.933432 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.933508 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.933530 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.933555 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:06 crc kubenswrapper[4634]: I0929 13:46:06.933573 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:06Z","lastTransitionTime":"2025-09-29T13:46:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.036188 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.036257 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.036280 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.036309 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.036333 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.109487 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.109850 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.111157 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.111428 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.139708 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.140012 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.140187 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.140294 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.140377 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.242585 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.242653 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.242665 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.242679 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.242688 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.344151 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.344175 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.344182 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.344196 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.344203 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.447269 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.447325 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.447343 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.447365 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.447381 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.551054 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.551100 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.551113 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.551128 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.551138 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.653139 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.653191 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.653202 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.653216 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.653225 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.737009 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.737071 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.737109 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.737133 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.737150 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.753434 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:07Z is after 2025-08-24T17:21:41Z"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.758199 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.758238 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.758253 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.758273 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.758288 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.774206 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:07Z is after 2025-08-24T17:21:41Z"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.778434 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.778482 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.778498 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.778519 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.778531 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.799166 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:07Z is after 2025-08-24T17:21:41Z"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.803975 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.804031 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.804050 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.804076 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.804159 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.823720 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:07Z is after 2025-08-24T17:21:41Z"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.829247 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.829357 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.829369 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.829386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.829398 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.848201 4634 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148072Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608872Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T13:46:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"28d18494-3e65-4b8e-b583-09026bdf9b9b\\\",\\\"systemUUID\\\":\\\"dfa74544-018f-4337-a1a0-0c08b95c16d0\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T13:46:07Z is after 2025-08-24T17:21:41Z"
Sep 29 13:46:07 crc kubenswrapper[4634]: E0929 13:46:07.848377 4634 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.850463 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.850495 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.850506 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.850522 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.850533 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.953285 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.953347 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.953365 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.953389 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:07 crc kubenswrapper[4634]: I0929 13:46:07.953406 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:07Z","lastTransitionTime":"2025-09-29T13:46:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.058978 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.059145 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.059163 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.059187 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.059201 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.109885 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.110148 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.110193 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:46:08 crc kubenswrapper[4634]: E0929 13:46:08.110382 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:46:08 crc kubenswrapper[4634]: E0929 13:46:08.110469 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 13:46:08 crc kubenswrapper[4634]: E0929 13:46:08.110616 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.163391 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.163430 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.163441 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.163462 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.163474 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.266849 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.266896 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.266907 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.266925 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.266936 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.370063 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.370149 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.370164 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.370184 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.370227 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.473733 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.473780 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.473791 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.473807 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.473820 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.577203 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.577262 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.577282 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.577306 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.577320 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.681619 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.681736 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.681758 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.681792 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.681816 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.786087 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.786773 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.786937 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.787112 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.787314 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.890862 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.891382 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.891545 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.891694 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.891785 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.996442 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.996496 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.996515 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.996568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:08 crc kubenswrapper[4634]: I0929 13:46:08.996588 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:08Z","lastTransitionTime":"2025-09-29T13:46:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.100520 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.100602 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.100620 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.100648 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.100666 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.113542 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm"
Sep 29 13:46:09 crc kubenswrapper[4634]: E0929 13:46:09.114046 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.203041 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.203140 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.203159 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.203186 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.203204 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.305566 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.305604 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.305614 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.305631 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.305642 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.408435 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.408497 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.408514 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.408538 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.408555 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.511583 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.511655 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.511678 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.511707 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.511726 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.615228 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.615299 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.615321 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.615348 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.615371 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.718892 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.718964 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.718975 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.718990 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.719006 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.821599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.821657 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.821673 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.821696 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.821713 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.923995 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.924050 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.924057 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.924070 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:09 crc kubenswrapper[4634]: I0929 13:46:09.924078 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:09Z","lastTransitionTime":"2025-09-29T13:46:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.025445 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.025473 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.025481 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.025493 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.025502 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.110579 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.110652 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 13:46:10 crc kubenswrapper[4634]: E0929 13:46:10.110708 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 13:46:10 crc kubenswrapper[4634]: E0929 13:46:10.110768 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.110940 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 13:46:10 crc kubenswrapper[4634]: E0929 13:46:10.111310 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.127636 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.127676 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.127686 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.127702 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.127715 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.170943 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podStartSLOduration=77.170918285 podStartE2EDuration="1m17.170918285s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.140104594 +0000 UTC m=+100.708832383" watchObservedRunningTime="2025-09-29 13:46:10.170918285 +0000 UTC m=+100.739646074"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.182320 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-qvsct" podStartSLOduration=77.182297749 podStartE2EDuration="1m17.182297749s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.182221727 +0000 UTC m=+100.750949476" watchObservedRunningTime="2025-09-29 13:46:10.182297749 +0000 UTC m=+100.751025538"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.219957 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-wtnjd" podStartSLOduration=77.219937798 podStartE2EDuration="1m17.219937798s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.205538141 +0000 UTC m=+100.774265900" watchObservedRunningTime="2025-09-29 13:46:10.219937798 +0000 UTC m=+100.788665547"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.230291 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.230319 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.230327 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29
13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.230351 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.230359 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.256726 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-sxkt4" podStartSLOduration=77.256706633 podStartE2EDuration="1m17.256706633s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.256376684 +0000 UTC m=+100.825104453" watchObservedRunningTime="2025-09-29 13:46:10.256706633 +0000 UTC m=+100.825434382" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.273639 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-b59nb" podStartSLOduration=77.27361514 podStartE2EDuration="1m17.27361514s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.27360376 +0000 UTC m=+100.842331509" watchObservedRunningTime="2025-09-29 13:46:10.27361514 +0000 UTC m=+100.842342909" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.298887 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=77.298838236 podStartE2EDuration="1m17.298838236s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.297911541 +0000 UTC m=+100.866639330" watchObservedRunningTime="2025-09-29 13:46:10.298838236 +0000 UTC m=+100.867565985" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.322571 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=81.322549982 podStartE2EDuration="1m21.322549982s" podCreationTimestamp="2025-09-29 13:44:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.311372143 +0000 UTC m=+100.880099902" watchObservedRunningTime="2025-09-29 13:46:10.322549982 +0000 UTC m=+100.891277741" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.323035 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=15.323023215 podStartE2EDuration="15.323023215s" podCreationTimestamp="2025-09-29 13:45:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.321985576 +0000 UTC m=+100.890713315" watchObservedRunningTime="2025-09-29 13:46:10.323023215 +0000 UTC m=+100.891750984" Sep 
29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.331807 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.331846 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.331855 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.331869 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.331878 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.364160 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-vgbtn" podStartSLOduration=76.36414358 podStartE2EDuration="1m16.36414358s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.364003216 +0000 UTC m=+100.932730965" watchObservedRunningTime="2025-09-29 13:46:10.36414358 +0000 UTC m=+100.932871319" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.378079 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.378064895 podStartE2EDuration="51.378064895s" podCreationTimestamp="2025-09-29 13:45:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.37756629 +0000 UTC m=+100.946294039" watchObservedRunningTime="2025-09-29 13:46:10.378064895 +0000 UTC m=+100.946792644" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.434193 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.434239 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.434252 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.434268 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.434282 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.536920 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.536950 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.536959 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.536972 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.536983 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.639287 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.639376 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.639404 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.639435 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.639458 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.741859 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.741923 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.741943 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.741957 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.741965 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.845386 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.845439 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.845456 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.845481 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.845499 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.948251 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.948304 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.948322 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.948345 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:10 crc kubenswrapper[4634]: I0929 13:46:10.948361 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:10Z","lastTransitionTime":"2025-09-29T13:46:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.051936 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.052074 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.052135 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.052164 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.052187 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.109419 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:11 crc kubenswrapper[4634]: E0929 13:46:11.109600 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.154616 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.154647 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.154656 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.154673 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.154683 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.257797 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.257914 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.257932 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.257956 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.257971 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.361900 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.361946 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.361958 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.361974 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.361985 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.464622 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.464695 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.464717 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.464749 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.464773 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.567791 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.567852 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.567870 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.567896 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.567918 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.670468 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.670568 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.670636 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.670663 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.670699 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.691501 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:11 crc kubenswrapper[4634]: E0929 13:46:11.691669 4634 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:46:11 crc kubenswrapper[4634]: E0929 13:46:11.691726 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs podName:85c1b26c-a922-4d3c-934f-e6968735a76e nodeName:}" failed. No retries permitted until 2025-09-29 13:47:15.691709816 +0000 UTC m=+166.260437565 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs") pod "network-metrics-daemon-nl5xm" (UID: "85c1b26c-a922-4d3c-934f-e6968735a76e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.774114 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.774155 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.774166 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.774211 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.774227 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.876896 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.876936 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.876955 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.876982 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.876999 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.979601 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.979661 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.979678 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.979700 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:11 crc kubenswrapper[4634]: I0929 13:46:11.979717 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:11Z","lastTransitionTime":"2025-09-29T13:46:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.082443 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.082497 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.082506 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.082520 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.082529 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.109926 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.109969 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:12 crc kubenswrapper[4634]: E0929 13:46:12.110042 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.110125 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:12 crc kubenswrapper[4634]: E0929 13:46:12.110288 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:12 crc kubenswrapper[4634]: E0929 13:46:12.110326 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.185164 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.185233 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.185243 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.185259 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.185279 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.287563 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.287625 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.287643 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.287668 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.287686 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.390997 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.391052 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.391171 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.391209 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.391229 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.494633 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.494711 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.494730 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.494760 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.494779 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.597783 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.597822 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.597830 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.597845 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.597855 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.700728 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.700828 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.700847 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.700873 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.700891 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.804015 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.804408 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.804479 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.804560 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.804633 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.907467 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.907546 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.907563 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.907587 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:12 crc kubenswrapper[4634]: I0929 13:46:12.907607 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:12Z","lastTransitionTime":"2025-09-29T13:46:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.009966 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.010034 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.010057 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.010072 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.010112 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.109651 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:13 crc kubenswrapper[4634]: E0929 13:46:13.109840 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.112984 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.113037 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.113055 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.113076 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.113128 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.216338 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.216408 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.216427 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.216454 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.216474 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.318488 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.319014 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.319123 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.319204 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.319269 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.422481 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.422550 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.422572 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.422600 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.422625 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.524770 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.524805 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.524837 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.524855 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.524869 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.626703 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.626761 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.626773 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.626790 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.626799 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.729364 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.729400 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.729409 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.729441 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.729449 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.832523 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.832556 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.832566 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.832581 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.832593 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.934734 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.934793 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.934809 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.934836 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:13 crc kubenswrapper[4634]: I0929 13:46:13.934854 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:13Z","lastTransitionTime":"2025-09-29T13:46:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.036922 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.037258 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.037457 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.037604 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.037749 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.109983 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.110037 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.109986 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:14 crc kubenswrapper[4634]: E0929 13:46:14.110109 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:14 crc kubenswrapper[4634]: E0929 13:46:14.110203 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:14 crc kubenswrapper[4634]: E0929 13:46:14.110457 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.141371 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.141551 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.141575 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.141599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.141616 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.245220 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.245306 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.245324 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.245349 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.245368 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.347307 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.347394 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.347417 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.347443 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.347461 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.450660 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.450718 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.450734 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.450761 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.450780 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.554168 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.554242 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.554266 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.554296 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.554317 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.657721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.657783 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.657800 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.657824 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.657842 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.761170 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.761582 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.761702 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.761854 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.762008 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.864358 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.864392 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.864403 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.864419 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.864432 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.966754 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.966800 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.966814 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.966833 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:14 crc kubenswrapper[4634]: I0929 13:46:14.966847 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:14Z","lastTransitionTime":"2025-09-29T13:46:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.070106 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.070160 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.070176 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.070197 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.070213 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.109393 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:15 crc kubenswrapper[4634]: E0929 13:46:15.109654 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.173529 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.173962 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.174281 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.174506 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.174713 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.278062 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.278186 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.278212 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.278235 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.278254 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.381422 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.382519 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.382721 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.382979 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.383187 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.485701 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.485752 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.485767 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.485787 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.485800 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.589850 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.589909 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.589928 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.589956 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.589973 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.694290 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.695079 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.695289 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.695433 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.695582 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.799162 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.799544 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.799710 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.799830 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.799947 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.903584 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.903627 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.903644 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.903667 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:15 crc kubenswrapper[4634]: I0929 13:46:15.903684 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:15Z","lastTransitionTime":"2025-09-29T13:46:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.007220 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.007277 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.007293 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.007316 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.007332 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.109318 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:16 crc kubenswrapper[4634]: E0929 13:46:16.109494 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.110527 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:16 crc kubenswrapper[4634]: E0929 13:46:16.110640 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.110819 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:16 crc kubenswrapper[4634]: E0929 13:46:16.110912 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.111068 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.111146 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.111261 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.111330 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.111367 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.214337 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.214857 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.215282 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.215340 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.215360 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.318261 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.318315 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.318334 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.318356 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.318375 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.420804 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.420843 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.420855 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.420871 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.420883 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.523038 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.523077 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.523104 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.523119 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.523130 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.626278 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.626337 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.626355 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.626378 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.626394 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.728766 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.728836 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.728854 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.728879 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.728897 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.831535 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.831852 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.831986 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.832086 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.832223 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.935348 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.935659 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.935834 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.935985 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:16 crc kubenswrapper[4634]: I0929 13:46:16.936162 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:16Z","lastTransitionTime":"2025-09-29T13:46:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.039775 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.040350 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.040481 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.040599 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.040718 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.109450 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:17 crc kubenswrapper[4634]: E0929 13:46:17.109819 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.143004 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.143069 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.143139 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.143169 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.143191 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.245872 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.246208 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.246300 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.246408 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.246494 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.349275 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.349312 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.349320 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.349333 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.349344 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.451844 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.451895 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.451911 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.451933 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.451951 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.554730 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.554767 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.554776 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.554790 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.554799 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.656354 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.656390 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.656400 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.656413 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.656422 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.758772 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.758832 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.758849 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.758873 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.758889 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.860843 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.860877 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.860885 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.860907 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.860916 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.953182 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.953203 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.953211 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.953224 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 13:46:17 crc kubenswrapper[4634]: I0929 13:46:17.953232 4634 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T13:46:17Z","lastTransitionTime":"2025-09-29T13:46:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.006493 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.006460845 podStartE2EDuration="1m28.006460845s" podCreationTimestamp="2025-09-29 13:44:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:10.427642813 +0000 UTC m=+100.996370562" watchObservedRunningTime="2025-09-29 13:46:18.006460845 +0000 UTC m=+108.575188594" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.007353 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n"] Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.007811 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.010002 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.010288 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.010345 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.011183 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.063336 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a1407202-f1c2-4ec5-9592-35dcb072fa6f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.063925 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1407202-f1c2-4ec5-9592-35dcb072fa6f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.064339 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a1407202-f1c2-4ec5-9592-35dcb072fa6f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.064415 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1407202-f1c2-4ec5-9592-35dcb072fa6f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.064459 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a1407202-f1c2-4ec5-9592-35dcb072fa6f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.110184 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:18 crc kubenswrapper[4634]: E0929 13:46:18.110575 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.110836 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:18 crc kubenswrapper[4634]: E0929 13:46:18.111055 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.111463 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:18 crc kubenswrapper[4634]: E0929 13:46:18.111692 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.166023 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a1407202-f1c2-4ec5-9592-35dcb072fa6f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.166122 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1407202-f1c2-4ec5-9592-35dcb072fa6f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.166157 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a1407202-f1c2-4ec5-9592-35dcb072fa6f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.166248 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a1407202-f1c2-4ec5-9592-35dcb072fa6f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.166285 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1407202-f1c2-4ec5-9592-35dcb072fa6f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.166248 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/a1407202-f1c2-4ec5-9592-35dcb072fa6f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.166869 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/a1407202-f1c2-4ec5-9592-35dcb072fa6f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.167511 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a1407202-f1c2-4ec5-9592-35dcb072fa6f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 
29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.173152 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1407202-f1c2-4ec5-9592-35dcb072fa6f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.182786 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a1407202-f1c2-4ec5-9592-35dcb072fa6f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xs69n\" (UID: \"a1407202-f1c2-4ec5-9592-35dcb072fa6f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.332782 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" Sep 29 13:46:18 crc kubenswrapper[4634]: W0929 13:46:18.356474 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1407202_f1c2_4ec5_9592_35dcb072fa6f.slice/crio-5e3f076b3c7097fe50964351aa314a0d67b1b8b9ac5d8aa28bfbb75432ac71d9 WatchSource:0}: Error finding container 5e3f076b3c7097fe50964351aa314a0d67b1b8b9ac5d8aa28bfbb75432ac71d9: Status 404 returned error can't find the container with id 5e3f076b3c7097fe50964351aa314a0d67b1b8b9ac5d8aa28bfbb75432ac71d9 Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.661756 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" event={"ID":"a1407202-f1c2-4ec5-9592-35dcb072fa6f","Type":"ContainerStarted","Data":"6b5aa4ec8e51c975ac1295bac5b6168185530ffe49ae96314cbe66425fafa4b2"} Sep 29 13:46:18 crc kubenswrapper[4634]: I0929 13:46:18.661843 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" event={"ID":"a1407202-f1c2-4ec5-9592-35dcb072fa6f","Type":"ContainerStarted","Data":"5e3f076b3c7097fe50964351aa314a0d67b1b8b9ac5d8aa28bfbb75432ac71d9"} Sep 29 13:46:19 crc kubenswrapper[4634]: I0929 13:46:19.110021 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:19 crc kubenswrapper[4634]: E0929 13:46:19.110486 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:20 crc kubenswrapper[4634]: I0929 13:46:20.109897 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:20 crc kubenswrapper[4634]: I0929 13:46:20.109954 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:20 crc kubenswrapper[4634]: I0929 13:46:20.110008 4634 util.go:30] "No sandbox for pod can be found. 
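
The paired reconciler_common.go "operationExecutor.MountVolume started" and operation_generator.go "MountVolume.SetUp succeeded" entries above bracket each volume mount for the cluster-version-operator pod, so the gap between them is the per-volume mount latency (sub-millisecond for the host-path mounts, ~7 ms for the secret, ~16 ms for the projected service-account token). A minimal sketch that pairs the two entries by volume name when fed a log like this one on stdin; the regexps and the klog header layout they assume are illustrative, not kubelet code:

// pairmounts.go - run as: go run pairmounts.go < kubelet.log
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"time"
)

var (
	// klog header: I0929 13:46:18.166023 ... (month/day, no year)
	started   = regexp.MustCompile(`I(\d{4} \d{2}:\d{2}:\d{2}\.\d{6}).*MountVolume started for volume \\"([^"\\]+)\\"`)
	succeeded = regexp.MustCompile(`I(\d{4} \d{2}:\d{2}:\d{2}\.\d{6}).*MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\"`)
)

func parseKlogTime(s string) time.Time {
	// klog headers carry no year; the zero year is a harmless placeholder
	// since we only subtract timestamps.
	t, _ := time.Parse("0102 15:04:05.000000", s)
	return t
}

func main() {
	begin := map[string]time.Time{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1024*1024), 1024*1024) // kubelet log lines are long
	for sc.Scan() {
		line := sc.Text()
		if m := started.FindStringSubmatch(line); m != nil {
			begin[m[2]] = parseKlogTime(m[1])
		} else if m := succeeded.FindStringSubmatch(line); m != nil {
			if t0, ok := begin[m[2]]; ok {
				fmt.Printf("%-25s mounted in %v\n", m[2], parseKlogTime(m[1]).Sub(t0))
			}
		}
	}
}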
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:20 crc kubenswrapper[4634]: E0929 13:46:20.111788 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:20 crc kubenswrapper[4634]: E0929 13:46:20.111916 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:20 crc kubenswrapper[4634]: E0929 13:46:20.112151 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:21 crc kubenswrapper[4634]: I0929 13:46:21.110066 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:21 crc kubenswrapper[4634]: E0929 13:46:21.110254 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:22 crc kubenswrapper[4634]: I0929 13:46:22.110051 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:22 crc kubenswrapper[4634]: E0929 13:46:22.110162 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:22 crc kubenswrapper[4634]: I0929 13:46:22.110339 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:22 crc kubenswrapper[4634]: I0929 13:46:22.110772 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:22 crc kubenswrapper[4634]: E0929 13:46:22.110857 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:22 crc kubenswrapper[4634]: E0929 13:46:22.110990 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:22 crc kubenswrapper[4634]: I0929 13:46:22.111252 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:46:22 crc kubenswrapper[4634]: E0929 13:46:22.111536 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-jqlh6_openshift-ovn-kubernetes(65f06677-4cbf-41c9-a0da-02f49710c11c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" Sep 29 13:46:23 crc kubenswrapper[4634]: I0929 13:46:23.110145 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:23 crc kubenswrapper[4634]: E0929 13:46:23.110346 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:24 crc kubenswrapper[4634]: I0929 13:46:24.109606 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:24 crc kubenswrapper[4634]: E0929 13:46:24.109896 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:24 crc kubenswrapper[4634]: I0929 13:46:24.109942 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:24 crc kubenswrapper[4634]: I0929 13:46:24.110133 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:24 crc kubenswrapper[4634]: E0929 13:46:24.110204 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:24 crc kubenswrapper[4634]: E0929 13:46:24.110129 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:25 crc kubenswrapper[4634]: I0929 13:46:25.110105 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:25 crc kubenswrapper[4634]: E0929 13:46:25.110217 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:26 crc kubenswrapper[4634]: I0929 13:46:26.109736 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:26 crc kubenswrapper[4634]: E0929 13:46:26.110137 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:26 crc kubenswrapper[4634]: I0929 13:46:26.110365 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:26 crc kubenswrapper[4634]: E0929 13:46:26.110425 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:26 crc kubenswrapper[4634]: I0929 13:46:26.110650 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:26 crc kubenswrapper[4634]: E0929 13:46:26.110715 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.110257 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:27 crc kubenswrapper[4634]: E0929 13:46:27.110472 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.687794 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/1.log" Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.688397 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/0.log" Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.688483 4634 generic.go:334] "Generic (PLEG): container finished" podID="77b5113e-50cd-417c-8991-cae5cd823f3f" containerID="ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3" exitCode=1 Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.688515 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerDied","Data":"ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3"} Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.688555 4634 scope.go:117] "RemoveContainer" containerID="9df6d824ee4f2f0f635fbffc96eb50917c019462b051b1f2c8f5cc8ce7edde9c" Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.689291 4634 scope.go:117] "RemoveContainer" containerID="ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3" Sep 29 13:46:27 crc kubenswrapper[4634]: E0929 13:46:27.689549 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-wtnjd_openshift-multus(77b5113e-50cd-417c-8991-cae5cd823f3f)\"" pod="openshift-multus/multus-wtnjd" podUID="77b5113e-50cd-417c-8991-cae5cd823f3f" Sep 29 13:46:27 crc kubenswrapper[4634]: I0929 13:46:27.707825 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xs69n" podStartSLOduration=94.707800498 podStartE2EDuration="1m34.707800498s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:18.678168641 +0000 UTC m=+109.246896480" watchObservedRunningTime="2025-09-29 
13:46:27.707800498 +0000 UTC m=+118.276528287" Sep 29 13:46:28 crc kubenswrapper[4634]: I0929 13:46:28.109407 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:28 crc kubenswrapper[4634]: E0929 13:46:28.109528 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:28 crc kubenswrapper[4634]: I0929 13:46:28.109568 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:28 crc kubenswrapper[4634]: E0929 13:46:28.109741 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:28 crc kubenswrapper[4634]: I0929 13:46:28.109983 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:28 crc kubenswrapper[4634]: E0929 13:46:28.110277 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:28 crc kubenswrapper[4634]: I0929 13:46:28.694785 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/1.log" Sep 29 13:46:29 crc kubenswrapper[4634]: I0929 13:46:29.109907 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:29 crc kubenswrapper[4634]: E0929 13:46:29.110027 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:30 crc kubenswrapper[4634]: I0929 13:46:30.109238 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:30 crc kubenswrapper[4634]: I0929 13:46:30.109242 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:30 crc kubenswrapper[4634]: I0929 13:46:30.109292 4634 util.go:30] "No sandbox for pod can be found. 
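
The pod_startup_latency_tracker entry above records the cluster-version-operator pod's startup SLO: the printed podStartE2EDuration of 1m34.707800498s is exactly watchObservedRunningTime (13:46:27.707800498) minus podCreationTimestamp (13:44:53), with both image-pull timestamps zeroed because the images were already on disk. Reading the metric as observation time minus creation time is an inference from the printed fields rather than kubelet's exact formula; a tiny check of the arithmetic (timestamps converted from the log's "+0000 UTC" form to RFC 3339):

// sloduration.go - reproduces the arithmetic in the startup-latency entry.
package main

import (
	"fmt"
	"time"
)

func main() {
	created, _ := time.Parse(time.RFC3339, "2025-09-29T13:44:53Z")
	observed, _ := time.Parse(time.RFC3339Nano, "2025-09-29T13:46:27.707800498Z")
	// Prints "1m34.707800498s", matching podStartE2EDuration above.
	fmt.Println("podStartE2EDuration =", observed.Sub(created))
}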
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:30 crc kubenswrapper[4634]: E0929 13:46:30.110015 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:30 crc kubenswrapper[4634]: E0929 13:46:30.110143 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:30 crc kubenswrapper[4634]: E0929 13:46:30.110204 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:30 crc kubenswrapper[4634]: E0929 13:46:30.123965 4634 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 13:46:30 crc kubenswrapper[4634]: E0929 13:46:30.190906 4634 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 13:46:31 crc kubenswrapper[4634]: I0929 13:46:31.111227 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:31 crc kubenswrapper[4634]: E0929 13:46:31.111529 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:32 crc kubenswrapper[4634]: I0929 13:46:32.110158 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:32 crc kubenswrapper[4634]: I0929 13:46:32.110202 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:32 crc kubenswrapper[4634]: I0929 13:46:32.110272 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:32 crc kubenswrapper[4634]: E0929 13:46:32.111055 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:32 crc kubenswrapper[4634]: E0929 13:46:32.111136 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:32 crc kubenswrapper[4634]: E0929 13:46:32.110904 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:33 crc kubenswrapper[4634]: I0929 13:46:33.109144 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:33 crc kubenswrapper[4634]: E0929 13:46:33.109293 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:34 crc kubenswrapper[4634]: I0929 13:46:34.110121 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:34 crc kubenswrapper[4634]: E0929 13:46:34.110247 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:34 crc kubenswrapper[4634]: I0929 13:46:34.110373 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:34 crc kubenswrapper[4634]: I0929 13:46:34.110380 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:34 crc kubenswrapper[4634]: E0929 13:46:34.110558 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:34 crc kubenswrapper[4634]: E0929 13:46:34.110651 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:35 crc kubenswrapper[4634]: I0929 13:46:35.109368 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:35 crc kubenswrapper[4634]: E0929 13:46:35.109556 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:35 crc kubenswrapper[4634]: E0929 13:46:35.192416 4634 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 13:46:36 crc kubenswrapper[4634]: I0929 13:46:36.109217 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:36 crc kubenswrapper[4634]: E0929 13:46:36.109438 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:36 crc kubenswrapper[4634]: I0929 13:46:36.109699 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:36 crc kubenswrapper[4634]: E0929 13:46:36.109797 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:36 crc kubenswrapper[4634]: I0929 13:46:36.110577 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:36 crc kubenswrapper[4634]: E0929 13:46:36.110705 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:37 crc kubenswrapper[4634]: I0929 13:46:37.110105 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:37 crc kubenswrapper[4634]: E0929 13:46:37.110302 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:37 crc kubenswrapper[4634]: I0929 13:46:37.112907 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:46:37 crc kubenswrapper[4634]: I0929 13:46:37.726542 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/3.log" Sep 29 13:46:37 crc kubenswrapper[4634]: I0929 13:46:37.729433 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerStarted","Data":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} Sep 29 13:46:37 crc kubenswrapper[4634]: I0929 13:46:37.729801 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:46:37 crc kubenswrapper[4634]: I0929 13:46:37.760194 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podStartSLOduration=104.760179174 podStartE2EDuration="1m44.760179174s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:37.758889658 +0000 UTC m=+128.327617407" watchObservedRunningTime="2025-09-29 13:46:37.760179174 +0000 UTC m=+128.328906923" Sep 29 13:46:38 crc kubenswrapper[4634]: I0929 13:46:38.003853 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nl5xm"] Sep 29 13:46:38 crc kubenswrapper[4634]: I0929 13:46:38.004134 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:38 crc kubenswrapper[4634]: E0929 13:46:38.004325 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:38 crc kubenswrapper[4634]: I0929 13:46:38.109381 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:38 crc kubenswrapper[4634]: E0929 13:46:38.109508 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:38 crc kubenswrapper[4634]: I0929 13:46:38.109714 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:38 crc kubenswrapper[4634]: E0929 13:46:38.109777 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:38 crc kubenswrapper[4634]: I0929 13:46:38.110033 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:38 crc kubenswrapper[4634]: E0929 13:46:38.110122 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:40 crc kubenswrapper[4634]: I0929 13:46:40.109541 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:40 crc kubenswrapper[4634]: I0929 13:46:40.111822 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:40 crc kubenswrapper[4634]: I0929 13:46:40.111961 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:40 crc kubenswrapper[4634]: I0929 13:46:40.112024 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:40 crc kubenswrapper[4634]: I0929 13:46:40.112165 4634 scope.go:117] "RemoveContainer" containerID="ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3" Sep 29 13:46:40 crc kubenswrapper[4634]: E0929 13:46:40.112568 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:40 crc kubenswrapper[4634]: E0929 13:46:40.112699 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:40 crc kubenswrapper[4634]: E0929 13:46:40.112816 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:40 crc kubenswrapper[4634]: E0929 13:46:40.112931 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:40 crc kubenswrapper[4634]: E0929 13:46:40.193663 4634 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 13:46:40 crc kubenswrapper[4634]: I0929 13:46:40.742671 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/1.log" Sep 29 13:46:40 crc kubenswrapper[4634]: I0929 13:46:40.743065 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerStarted","Data":"639b3b2ff647f9ebecb99109c34868c82c3aeda6e6eb0d1a1abf777bb5bb4643"} Sep 29 13:46:42 crc kubenswrapper[4634]: I0929 13:46:42.109886 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:42 crc kubenswrapper[4634]: I0929 13:46:42.110017 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:42 crc kubenswrapper[4634]: I0929 13:46:42.110105 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:42 crc kubenswrapper[4634]: E0929 13:46:42.110193 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:42 crc kubenswrapper[4634]: I0929 13:46:42.110220 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:42 crc kubenswrapper[4634]: E0929 13:46:42.110424 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:42 crc kubenswrapper[4634]: E0929 13:46:42.110610 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:42 crc kubenswrapper[4634]: E0929 13:46:42.110748 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:44 crc kubenswrapper[4634]: I0929 13:46:44.109147 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:44 crc kubenswrapper[4634]: I0929 13:46:44.109187 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:44 crc kubenswrapper[4634]: I0929 13:46:44.109187 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:44 crc kubenswrapper[4634]: I0929 13:46:44.109146 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:44 crc kubenswrapper[4634]: E0929 13:46:44.109267 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 13:46:44 crc kubenswrapper[4634]: E0929 13:46:44.109358 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nl5xm" podUID="85c1b26c-a922-4d3c-934f-e6968735a76e" Sep 29 13:46:44 crc kubenswrapper[4634]: E0929 13:46:44.109412 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 13:46:44 crc kubenswrapper[4634]: E0929 13:46:44.109539 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.109167 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.109191 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.109198 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.109180 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.113790 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.113963 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.114055 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.113790 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.114458 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Sep 29 13:46:46 crc kubenswrapper[4634]: I0929 13:46:46.116020 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Sep 29 13:46:47 crc kubenswrapper[4634]: I0929 13:46:47.931745 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.623396 4634 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.665006 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-5nvq7"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.665465 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.668208 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pccc9"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.669450 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-kcrwt"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.671164 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.671273 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.671599 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.670074 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-kcrwt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686194 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686214 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686372 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686474 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686717 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686838 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686915 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686984 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.687021 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.686376 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.688492 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.688657 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.688701 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.688772 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.688795 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.689352 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwzp5"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.689384 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.689611 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.689615 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.693006 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4vmhr"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.693355 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.693546 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.693758 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.693989 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.694166 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.701269 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.702333 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.702807 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vvk4f"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.703130 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.703470 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.709307 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.709481 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.709932 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.710525 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.710603 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.710690 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.710766 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.710842 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.710973 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.711044 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.711130 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.711200 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.711270 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.712455 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.712578 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.712781 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.713871 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.714005 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" 
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.714166 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.714359 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.717441 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-md5b8"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.717904 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.718329 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.718369 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.718432 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.718834 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.718905 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.718982 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.719126 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.721210 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.721413 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.721652 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.721782 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.721948 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.722209 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.722671 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7tbnt"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.722742 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.722936 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mqhd"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.723054 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.723181 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.723348 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.723512 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.724916 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.728599 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.728774 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.728930 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.729046 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.729259 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.742386 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-64xkr"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.743143 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rgfdh"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.756342 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.756443 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.757050 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.756675 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.773148 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.773669 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.773775 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.773787 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-64xkr"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.773851 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.773917 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774002 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774075 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774129 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774098 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774443 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774526 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774626 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774682 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-27cm7"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.774729 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.775113 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-5nvq7"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.776651 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.776760 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.777147 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.779709 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.779931 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.780056 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-kcrwt"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.780075 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.780160 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.780278 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.780332 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pccc9"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.780358 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.780553 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.783186 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.783390 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.783483 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.784250 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.785877 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-x2lkr"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.786488 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-x2lkr"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.787805 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.788130 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd"
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.792496 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.793181 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.793861 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.794119 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.794961 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.795175 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.795337 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.795392 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.795794 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.795965 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.796108 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.796226 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.796352 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.797072 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.802596 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.823328 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.823607 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.824797 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.825023 4634 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console-operator"/"trusted-ca" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.825325 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.825431 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.825792 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.826048 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.826450 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.829612 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.829926 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.830309 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.831780 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.832568 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.849862 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.854133 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.855138 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.858602 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c722e944-c116-40fe-a812-3f7dad194f1a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.858672 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c722e944-c116-40fe-a812-3f7dad194f1a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.858738 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd8zh\" (UniqueName: \"kubernetes.io/projected/c722e944-c116-40fe-a812-3f7dad194f1a-kube-api-access-fd8zh\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.862111 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4vmhr"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.863403 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwzp5"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.876767 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.876989 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.877314 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.880708 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.881069 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.881490 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9v7l5"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.881854 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.889384 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.889609 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.889847 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.890722 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.891115 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.895151 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.895948 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.896396 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.899718 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.900191 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-zjrr2"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.900833 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.901555 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.901813 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.902019 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.902507 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.902925 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.903372 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.903619 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.903869 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7hz92"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.904977 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.908879 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.911497 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-m4gqz"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.912149 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.913578 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mwwmx"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.914233 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.914352 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.916163 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.921025 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-md5b8"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.921812 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.922792 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rgfdh"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.925028 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.925988 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-64xkr"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.927247 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.927377 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-8h72q"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.929046 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-9z4x7"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.929349 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.929509 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.929609 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9z4x7" Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.930898 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-m4gqz"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.932014 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-zjrr2"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.932967 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7tbnt"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.934166 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.935179 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.936173 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.937294 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.938274 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.939304 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.940712 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.941675 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mqhd"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.942697 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.943811 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-27cm7"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.944813 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vvk4f"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.945814 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9z4x7"] Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.947021 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq"] Sep 29 
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.948099 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.950487 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.957348 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.960630 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd8zh\" (UniqueName: \"kubernetes.io/projected/c722e944-c116-40fe-a812-3f7dad194f1a-kube-api-access-fd8zh\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.960684 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c722e944-c116-40fe-a812-3f7dad194f1a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.960738 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c722e944-c116-40fe-a812-3f7dad194f1a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.961423 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c722e944-c116-40fe-a812-3f7dad194f1a-config\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.961490 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.963326 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mwwmx"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.965285 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.965655 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.967297 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.967682 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.969989 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9v7l5"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.971964 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7hz92"]
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.973325 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c722e944-c116-40fe-a812-3f7dad194f1a-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"
Sep 29 13:46:48 crc kubenswrapper[4634]: I0929 13:46:48.988534 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.008365 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.027604 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.067277 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.088202 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.107494 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.133411 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.147870 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.167561 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.189388 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.208509 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.227136 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.248830 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.268390 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.289003 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.308493 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.328161 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.347673 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.368199 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.408440 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.427481 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.447835 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.465867 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-config\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.465951 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq5vn\" (UniqueName: \"kubernetes.io/projected/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-kube-api-access-mq5vn\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.465997 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-bound-sa-token\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466029 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv2wn\" (UniqueName: \"kubernetes.io/projected/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-kube-api-access-wv2wn\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466060 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/97f23fc8-1289-47c6-8bfc-49c7a338064d-audit-dir\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466139 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466204 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466308 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466368 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-trusted-ca\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466411 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-service-ca\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466440 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-config\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466498 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f"
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466552 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f"
pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466589 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg584\" (UniqueName: \"kubernetes.io/projected/172d0968-1bd7-48d4-9bcd-62590bead86c-kube-api-access-tg584\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466622 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4024807b-080e-4f06-a78e-021f46ec69af-serving-cert\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466655 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e396a94f-fce7-4877-b19c-29f13bf2ffea-metrics-tls\") pod \"dns-operator-744455d44c-4vmhr\" (UID: \"e396a94f-fce7-4877-b19c-29f13bf2ffea\") " pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466728 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-trusted-ca-bundle\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466764 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-client\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466810 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-service-ca-bundle\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466848 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-serving-cert\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466878 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-audit-policies\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.466912 4634 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ff58d142-bb1b-4d79-be30-7d8c42951e4c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467159 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467246 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467379 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4bc890e-57a2-4633-88bf-cb66c90293e8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467419 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467452 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467485 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-ca\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467514 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6bsx\" (UniqueName: \"kubernetes.io/projected/5d677c42-66b0-47b9-904a-e2ef1049806e-kube-api-access-s6bsx\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467545 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-etcd-serving-ca\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467680 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-config\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467719 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-config\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467753 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-serving-cert\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467808 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpxlb\" (UniqueName: \"kubernetes.io/projected/ff58d142-bb1b-4d79-be30-7d8c42951e4c-kube-api-access-dpxlb\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.467911 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-serving-cert\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468028 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-config\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468118 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc 
kubenswrapper[4634]: I0929 13:46:49.468149 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468184 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkgtw\" (UniqueName: \"kubernetes.io/projected/97f23fc8-1289-47c6-8bfc-49c7a338064d-kube-api-access-kkgtw\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468220 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ff58d142-bb1b-4d79-be30-7d8c42951e4c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468252 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-audit\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468283 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-service-ca\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468332 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468361 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d19555df-603d-4db0-9ea2-9a473a55380d-config\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468394 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4024807b-080e-4f06-a78e-021f46ec69af-available-featuregates\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468424 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d19555df-603d-4db0-9ea2-9a473a55380d-serving-cert\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468452 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d19555df-603d-4db0-9ea2-9a473a55380d-trusted-ca\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468479 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-serving-cert\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468510 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-certificates\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468540 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-machine-approver-tls\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468567 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-etcd-client\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468594 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468641 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-oauth-serving-cert\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468672 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtqwc\" (UniqueName: 
\"kubernetes.io/projected/d19555df-603d-4db0-9ea2-9a473a55380d-kube-api-access-gtqwc\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468702 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-dir\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468765 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-etcd-client\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468794 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-oauth-config\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468822 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d677c42-66b0-47b9-904a-e2ef1049806e-serving-cert\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468855 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5622c47f-366e-4649-86ad-76c631616d12-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-c2w5x\" (UID: \"5622c47f-366e-4649-86ad-76c631616d12\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468886 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-config\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468915 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-encryption-config\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468946 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwds8\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-kube-api-access-qwds8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: 
\"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.468976 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-client-ca\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469004 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8k98\" (UniqueName: \"kubernetes.io/projected/cfca5304-d886-40b0-93ea-cb412ba053f9-kube-api-access-z8k98\") pod \"downloads-7954f5f757-kcrwt\" (UID: \"cfca5304-d886-40b0-93ea-cb412ba053f9\") " pod="openshift-console/downloads-7954f5f757-kcrwt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469033 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-serving-cert\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469063 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469120 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/97f23fc8-1289-47c6-8bfc-49c7a338064d-node-pullsecrets\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469150 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ff58d142-bb1b-4d79-be30-7d8c42951e4c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469179 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-auth-proxy-config\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469210 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-client-ca\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: 
\"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469241 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4pth\" (UniqueName: \"kubernetes.io/projected/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-kube-api-access-z4pth\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469286 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469317 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-console-config\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469375 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bfgc\" (UniqueName: \"kubernetes.io/projected/e396a94f-fce7-4877-b19c-29f13bf2ffea-kube-api-access-9bfgc\") pod \"dns-operator-744455d44c-4vmhr\" (UID: \"e396a94f-fce7-4877-b19c-29f13bf2ffea\") " pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469417 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdsqg\" (UniqueName: \"kubernetes.io/projected/95f9d479-e9b9-4086-8792-83625bfaff6e-kube-api-access-vdsqg\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469448 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6e8b69ca-c934-4baa-8957-62b6aff5babc-audit-dir\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469477 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-encryption-config\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469510 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25nxh\" (UniqueName: \"kubernetes.io/projected/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-kube-api-access-25nxh\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.469917 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5ml6\" (UniqueName: \"kubernetes.io/projected/4024807b-080e-4f06-a78e-021f46ec69af-kube-api-access-f5ml6\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470035 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4bc890e-57a2-4633-88bf-cb66c90293e8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.470194 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:49.970162246 +0000 UTC m=+140.538890025 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470255 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-serving-cert\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470287 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-image-import-ca\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470316 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470345 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-policies\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc 
kubenswrapper[4634]: I0929 13:46:49.470374 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470406 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnmcw\" (UniqueName: \"kubernetes.io/projected/6e8b69ca-c934-4baa-8957-62b6aff5babc-kube-api-access-gnmcw\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470435 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-tls\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.470463 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfszn\" (UniqueName: \"kubernetes.io/projected/5622c47f-366e-4649-86ad-76c631616d12-kube-api-access-qfszn\") pod \"cluster-samples-operator-665b6dd947-c2w5x\" (UID: \"5622c47f-366e-4649-86ad-76c631616d12\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.471318 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.487658 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.508913 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.528126 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.548459 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.568800 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.571598 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.571829 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-hh6vw\" (UniqueName: \"kubernetes.io/projected/0c78cbc4-e705-490d-b453-9b1ec8a4ca07-kube-api-access-hh6vw\") pod \"control-plane-machine-set-operator-78cbb6b69f-jq747\" (UID: \"0c78cbc4-e705-490d-b453-9b1ec8a4ca07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.571886 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-client-ca\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.571921 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ff58d142-bb1b-4d79-be30-7d8c42951e4c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.571954 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4857752f-13d0-4996-b112-6410097f9c28-service-ca-bundle\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.571992 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-client-ca\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572038 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4pth\" (UniqueName: \"kubernetes.io/projected/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-kube-api-access-z4pth\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572116 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89180294-9bf1-495b-9c50-cd89c01bcd21-serving-cert\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572185 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-config-volume\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572235 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl2p8\" (UniqueName: 
\"kubernetes.io/projected/aac91c41-1c11-475a-a50e-ee5183bd8219-kube-api-access-cl2p8\") pod \"multus-admission-controller-857f4d67dd-mwwmx\" (UID: \"aac91c41-1c11-475a-a50e-ee5183bd8219\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572307 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bfgc\" (UniqueName: \"kubernetes.io/projected/e396a94f-fce7-4877-b19c-29f13bf2ffea-kube-api-access-9bfgc\") pod \"dns-operator-744455d44c-4vmhr\" (UID: \"e396a94f-fce7-4877-b19c-29f13bf2ffea\") " pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572356 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6e8b69ca-c934-4baa-8957-62b6aff5babc-audit-dir\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572396 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bf6c344-5613-4019-bc8d-5a15ad459f46-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jv7wv\" (UID: \"4bf6c344-5613-4019-bc8d-5a15ad459f46\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572431 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-encryption-config\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572464 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a81c4f43-65d8-4496-ab69-1537e2d01ba1-cert\") pod \"ingress-canary-9z4x7\" (UID: \"a81c4f43-65d8-4496-ab69-1537e2d01ba1\") " pod="openshift-ingress-canary/ingress-canary-9z4x7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572496 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4bc890e-57a2-4633-88bf-cb66c90293e8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572531 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgh6p\" (UniqueName: \"kubernetes.io/projected/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-kube-api-access-mgh6p\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572564 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfszn\" (UniqueName: \"kubernetes.io/projected/5622c47f-366e-4649-86ad-76c631616d12-kube-api-access-qfszn\") pod 
\"cluster-samples-operator-665b6dd947-c2w5x\" (UID: \"5622c47f-366e-4649-86ad-76c631616d12\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572595 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-image-import-ca\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572624 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572653 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572683 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnmcw\" (UniqueName: \"kubernetes.io/projected/6e8b69ca-c934-4baa-8957-62b6aff5babc-kube-api-access-gnmcw\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572714 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572745 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-bound-sa-token\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572781 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq5vn\" (UniqueName: \"kubernetes.io/projected/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-kube-api-access-mq5vn\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572813 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f19ec5-9530-4336-b19c-fa51a40e7dea-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572845 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqc26\" (UniqueName: \"kubernetes.io/projected/addd2119-280d-4e65-94b2-82b42ccadb70-kube-api-access-dqc26\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572878 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5glp\" (UniqueName: \"kubernetes.io/projected/89180294-9bf1-495b-9c50-cd89c01bcd21-kube-api-access-j5glp\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572912 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7z55\" (UniqueName: \"kubernetes.io/projected/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-kube-api-access-x7z55\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572942 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/418efd12-f21a-4866-abb0-84bf3ea7d929-config\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: \"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.572986 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/addd2119-280d-4e65-94b2-82b42ccadb70-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573016 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rng7m\" (UniqueName: \"kubernetes.io/projected/4857752f-13d0-4996-b112-6410097f9c28-kube-api-access-rng7m\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573063 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-trusted-ca\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573127 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-config\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573159 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573191 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/89305210-320b-43d8-97ef-12a7809e9f73-signing-key\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573232 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4024807b-080e-4f06-a78e-021f46ec69af-serving-cert\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573261 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e396a94f-fce7-4877-b19c-29f13bf2ffea-metrics-tls\") pod \"dns-operator-744455d44c-4vmhr\" (UID: \"e396a94f-fce7-4877-b19c-29f13bf2ffea\") " pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573291 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-trusted-ca-bundle\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573320 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-client\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573352 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/157285ab-7f36-4042-a4ed-b975c55d7f27-proxy-tls\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573383 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c78cbc4-e705-490d-b453-9b1ec8a4ca07-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-jq747\" (UID: \"0c78cbc4-e705-490d-b453-9b1ec8a4ca07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 
13:46:49.573419 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ff58d142-bb1b-4d79-be30-7d8c42951e4c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573452 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573484 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r779f\" (UniqueName: \"kubernetes.io/projected/89305210-320b-43d8-97ef-12a7809e9f73-kube-api-access-r779f\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573514 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msc9h\" (UniqueName: \"kubernetes.io/projected/4bf6c344-5613-4019-bc8d-5a15ad459f46-kube-api-access-msc9h\") pod \"package-server-manager-789f6589d5-jv7wv\" (UID: \"4bf6c344-5613-4019-bc8d-5a15ad459f46\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573545 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsd8w\" (UniqueName: \"kubernetes.io/projected/157285ab-7f36-4042-a4ed-b975c55d7f27-kube-api-access-xsd8w\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573598 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b02e5190-b670-4ec4-824f-a4f18cf79e33-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573632 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573681 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-etcd-serving-ca\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" 
Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573724 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-config\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573756 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573789 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-serving-cert\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573822 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpxlb\" (UniqueName: \"kubernetes.io/projected/ff58d142-bb1b-4d79-be30-7d8c42951e4c-kube-api-access-dpxlb\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573851 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-serving-cert\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573881 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-config\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573915 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573947 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.573975 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-certs\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574024 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-service-ca\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574054 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574130 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d19555df-603d-4db0-9ea2-9a473a55380d-trusted-ca\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574163 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjtwl\" (UniqueName: \"kubernetes.io/projected/b02e5190-b670-4ec4-824f-a4f18cf79e33-kube-api-access-qjtwl\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574200 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-machine-approver-tls\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574232 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6051c8-2283-488a-8196-9a331c8ee74c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574265 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-etcd-client\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574296 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-dir\") pod \"oauth-openshift-558db77b4-vvk4f\" 
(UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574329 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-metrics-tls\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.574373 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.074344582 +0000 UTC m=+140.643072341 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574429 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-metrics-tls\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574463 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-metrics-certs\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574497 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwds8\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-kube-api-access-qwds8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574522 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-encryption-config\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574548 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/97f23fc8-1289-47c6-8bfc-49c7a338064d-node-pullsecrets\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574570 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-z8k98\" (UniqueName: \"kubernetes.io/projected/cfca5304-d886-40b0-93ea-cb412ba053f9-kube-api-access-z8k98\") pod \"downloads-7954f5f757-kcrwt\" (UID: \"cfca5304-d886-40b0-93ea-cb412ba053f9\") " pod="openshift-console/downloads-7954f5f757-kcrwt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574593 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-serving-cert\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574615 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574639 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f8c7cf50-5af5-488b-a97b-c3452513d570-apiservice-cert\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574666 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-auth-proxy-config\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574693 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq27r\" (UniqueName: \"kubernetes.io/projected/8269ba4f-674f-4886-aff2-1474500e1c38-kube-api-access-rq27r\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574714 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/89305210-320b-43d8-97ef-12a7809e9f73-signing-cabundle\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574735 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rzhk\" (UniqueName: \"kubernetes.io/projected/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-kube-api-access-9rzhk\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574821 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574844 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-console-config\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574868 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-node-bootstrap-token\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574894 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdsqg\" (UniqueName: \"kubernetes.io/projected/95f9d479-e9b9-4086-8792-83625bfaff6e-kube-api-access-vdsqg\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574918 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25nxh\" (UniqueName: \"kubernetes.io/projected/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-kube-api-access-25nxh\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574954 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkr5n\" (UniqueName: \"kubernetes.io/projected/a81c4f43-65d8-4496-ab69-1537e2d01ba1-kube-api-access-lkr5n\") pod \"ingress-canary-9z4x7\" (UID: \"a81c4f43-65d8-4496-ab69-1537e2d01ba1\") " pod="openshift-ingress-canary/ingress-canary-9z4x7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.574981 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5ml6\" (UniqueName: \"kubernetes.io/projected/4024807b-080e-4f06-a78e-021f46ec69af-kube-api-access-f5ml6\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575002 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6f19ec5-9530-4336-b19c-fa51a40e7dea-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575025 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6051c8-2283-488a-8196-9a331c8ee74c-serving-cert\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575044 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-srv-cert\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575065 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8l2q\" (UniqueName: \"kubernetes.io/projected/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-kube-api-access-n8l2q\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575105 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-trusted-ca\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575130 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-serving-cert\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575156 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-mountpoint-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575177 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dad5e909-9da1-44cf-9369-f0c771794006-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575199 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-tls\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575223 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-policies\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: 
\"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575257 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f8c7cf50-5af5-488b-a97b-c3452513d570-tmpfs\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575278 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-proxy-tls\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575320 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-config\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575346 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ttx7\" (UniqueName: \"kubernetes.io/projected/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-kube-api-access-6ttx7\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575368 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv2wn\" (UniqueName: \"kubernetes.io/projected/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-kube-api-access-wv2wn\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575389 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d6051c8-2283-488a-8196-9a331c8ee74c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575427 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqq5v\" (UniqueName: \"kubernetes.io/projected/5653fd8a-02c3-4169-8197-d9b2b5cd9086-kube-api-access-dqq5v\") pod \"migrator-59844c95c7-2z2nc\" (UID: \"5653fd8a-02c3-4169-8197-d9b2b5cd9086\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575464 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-default-certificate\") 
pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575492 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575515 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/97f23fc8-1289-47c6-8bfc-49c7a338064d-audit-dir\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575538 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575559 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-csi-data-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575580 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-config-volume\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575609 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575631 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/418efd12-f21a-4866-abb0-84bf3ea7d929-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: \"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575654 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-service-ca\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 
crc kubenswrapper[4634]: I0929 13:46:49.575674 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575696 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg584\" (UniqueName: \"kubernetes.io/projected/172d0968-1bd7-48d4-9bcd-62590bead86c-kube-api-access-tg584\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575719 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aac91c41-1c11-475a-a50e-ee5183bd8219-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mwwmx\" (UID: \"aac91c41-1c11-475a-a50e-ee5183bd8219\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575741 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5ssc\" (UniqueName: \"kubernetes.io/projected/dad5e909-9da1-44cf-9369-f0c771794006-kube-api-access-v5ssc\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575762 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-images\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575786 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-service-ca-bundle\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575808 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-serving-cert\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575836 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-audit-policies\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575862 4634 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/157285ab-7f36-4042-a4ed-b975c55d7f27-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575882 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-registration-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575906 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575929 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f8c7cf50-5af5-488b-a97b-c3452513d570-webhook-cert\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575948 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-stats-auth\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.575981 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4bc890e-57a2-4633-88bf-cb66c90293e8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576004 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576026 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b02e5190-b670-4ec4-824f-a4f18cf79e33-images\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576050 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-v5vl5\" (UniqueName: \"kubernetes.io/projected/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-kube-api-access-v5vl5\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576073 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576115 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-secret-volume\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576136 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpnp7\" (UniqueName: \"kubernetes.io/projected/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-kube-api-access-dpnp7\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576159 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-ca\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576181 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6bsx\" (UniqueName: \"kubernetes.io/projected/5d677c42-66b0-47b9-904a-e2ef1049806e-kube-api-access-s6bsx\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576205 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/418efd12-f21a-4866-abb0-84bf3ea7d929-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: \"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576230 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-config\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576251 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx2fg\" (UniqueName: \"kubernetes.io/projected/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-kube-api-access-gx2fg\") pod 
\"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576277 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-socket-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576298 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576323 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576346 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-plugins-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576371 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkgtw\" (UniqueName: \"kubernetes.io/projected/97f23fc8-1289-47c6-8bfc-49c7a338064d-kube-api-access-kkgtw\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576395 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ff58d142-bb1b-4d79-be30-7d8c42951e4c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576420 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-audit\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576441 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txb9q\" (UniqueName: \"kubernetes.io/projected/f8c7cf50-5af5-488b-a97b-c3452513d570-kube-api-access-txb9q\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 
13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576467 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d19555df-603d-4db0-9ea2-9a473a55380d-config\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576488 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b02e5190-b670-4ec4-824f-a4f18cf79e33-config\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576513 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4024807b-080e-4f06-a78e-021f46ec69af-available-featuregates\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576537 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d19555df-603d-4db0-9ea2-9a473a55380d-serving-cert\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576559 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-serving-cert\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576570 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-trusted-ca\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576943 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-config\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.577634 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-client-ca\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.577927 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-etcd-serving-ca\") pod 
\"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.578583 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6e8b69ca-c934-4baa-8957-62b6aff5babc-audit-dir\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.578629 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-config\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.579410 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-client-ca\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.582698 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.585840 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.586277 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.586464 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d19555df-603d-4db0-9ea2-9a473a55380d-trusted-ca\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.586856 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4bc890e-57a2-4633-88bf-cb66c90293e8-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.588172 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589218 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-dir\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.576580 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/addd2119-280d-4e65-94b2-82b42ccadb70-srv-cert\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589357 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-certificates\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589428 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-etcd-client\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589498 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589544 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dad5e909-9da1-44cf-9369-f0c771794006-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589617 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-oauth-serving-cert\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589691 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtqwc\" (UniqueName: \"kubernetes.io/projected/d19555df-603d-4db0-9ea2-9a473a55380d-kube-api-access-gtqwc\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " 
pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589732 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-oauth-config\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589798 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d677c42-66b0-47b9-904a-e2ef1049806e-serving-cert\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589871 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589915 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5622c47f-366e-4649-86ad-76c631616d12-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-c2w5x\" (UID: \"5622c47f-366e-4649-86ad-76c631616d12\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.589990 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-config\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.590052 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f19ec5-9530-4336-b19c-fa51a40e7dea-config\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.590126 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-auth-proxy-config\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.590149 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4bc890e-57a2-4633-88bf-cb66c90293e8-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc 
kubenswrapper[4634]: I0929 13:46:49.590166 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89180294-9bf1-495b-9c50-cd89c01bcd21-config\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.590567 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-config\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.590816 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.590841 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-ca\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.591552 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.594048 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/97f23fc8-1289-47c6-8bfc-49c7a338064d-node-pullsecrets\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.594281 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-certificates\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.594673 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-policies\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.595823 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ff58d142-bb1b-4d79-be30-7d8c42951e4c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.596782 4634 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-image-import-ca\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.596985 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d19555df-603d-4db0-9ea2-9a473a55380d-config\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.598315 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4024807b-080e-4f06-a78e-021f46ec69af-available-featuregates\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.600241 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/97f23fc8-1289-47c6-8bfc-49c7a338064d-audit\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.600697 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-auth-proxy-config\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.600936 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-etcd-client\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.601078 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.101036665 +0000 UTC m=+140.669764444 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.601248 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.601520 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-machine-approver-tls\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.601923 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4024807b-080e-4f06-a78e-021f46ec69af-serving-cert\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.602216 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-console-config\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.603290 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-audit-policies\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.603572 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-config\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.608283 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-service-ca-bundle\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.608689 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-service-ca\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.609340 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.609836 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-config\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.610038 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/5622c47f-366e-4649-86ad-76c631616d12-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-c2w5x\" (UID: \"5622c47f-366e-4649-86ad-76c631616d12\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.610385 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-serving-cert\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.610891 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.612414 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-oauth-serving-cert\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.613011 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d19555df-603d-4db0-9ea2-9a473a55380d-serving-cert\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.613295 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.613661 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-serving-cert\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.614013 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-tls\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.614553 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-encryption-config\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.614981 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-oauth-config\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.616322 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d677c42-66b0-47b9-904a-e2ef1049806e-config\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.616972 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6e8b69ca-c934-4baa-8957-62b6aff5babc-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.617329 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/97f23fc8-1289-47c6-8bfc-49c7a338064d-audit-dir\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.622709 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-serving-cert\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.622862 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.623029 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.623182 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.623488 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.623557 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-serving-cert\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.623893 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-trusted-ca-bundle\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.623821 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d677c42-66b0-47b9-904a-e2ef1049806e-etcd-client\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.624179 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-serving-cert\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.624575 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-service-ca\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.624645 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/ff58d142-bb1b-4d79-be30-7d8c42951e4c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:49 
crc kubenswrapper[4634]: I0929 13:46:49.624722 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/97f23fc8-1289-47c6-8bfc-49c7a338064d-etcd-client\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.625255 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-serving-cert\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.625352 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.625981 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.626612 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d677c42-66b0-47b9-904a-e2ef1049806e-serving-cert\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.628172 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.628434 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.629709 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6e8b69ca-c934-4baa-8957-62b6aff5babc-encryption-config\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.632020 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e396a94f-fce7-4877-b19c-29f13bf2ffea-metrics-tls\") pod \"dns-operator-744455d44c-4vmhr\" (UID: \"e396a94f-fce7-4877-b19c-29f13bf2ffea\") " pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.649038 4634 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.667521 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.688609 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.690977 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691173 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aac91c41-1c11-475a-a50e-ee5183bd8219-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mwwmx\" (UID: \"aac91c41-1c11-475a-a50e-ee5183bd8219\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691211 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5ssc\" (UniqueName: \"kubernetes.io/projected/dad5e909-9da1-44cf-9369-f0c771794006-kube-api-access-v5ssc\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691234 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-images\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691256 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/157285ab-7f36-4042-a4ed-b975c55d7f27-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691278 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-registration-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691303 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f8c7cf50-5af5-488b-a97b-c3452513d570-webhook-cert\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc 
kubenswrapper[4634]: I0929 13:46:49.691324 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-stats-auth\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691345 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b02e5190-b670-4ec4-824f-a4f18cf79e33-images\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.691386 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.191344976 +0000 UTC m=+140.760072735 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691452 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5vl5\" (UniqueName: \"kubernetes.io/projected/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-kube-api-access-v5vl5\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691522 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-secret-volume\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691557 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpnp7\" (UniqueName: \"kubernetes.io/projected/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-kube-api-access-dpnp7\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691601 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/418efd12-f21a-4866-abb0-84bf3ea7d929-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: \"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691636 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx2fg\" (UniqueName: 
\"kubernetes.io/projected/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-kube-api-access-gx2fg\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691671 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-socket-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691702 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691736 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-plugins-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691784 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txb9q\" (UniqueName: \"kubernetes.io/projected/f8c7cf50-5af5-488b-a97b-c3452513d570-kube-api-access-txb9q\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691820 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b02e5190-b670-4ec4-824f-a4f18cf79e33-config\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691850 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/addd2119-280d-4e65-94b2-82b42ccadb70-srv-cert\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691876 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dad5e909-9da1-44cf-9369-f0c771794006-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691923 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: 
\"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691959 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f19ec5-9530-4336-b19c-fa51a40e7dea-config\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.691984 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-auth-proxy-config\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.692006 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89180294-9bf1-495b-9c50-cd89c01bcd21-config\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.692237 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b02e5190-b670-4ec4-824f-a4f18cf79e33-images\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.692365 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-plugins-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.693157 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/157285ab-7f36-4042-a4ed-b975c55d7f27-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.693202 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-registration-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.693671 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-socket-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.694026 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/b02e5190-b670-4ec4-824f-a4f18cf79e33-config\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.694447 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-auth-proxy-config\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.694797 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.692031 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh6vw\" (UniqueName: \"kubernetes.io/projected/0c78cbc4-e705-490d-b453-9b1ec8a4ca07-kube-api-access-hh6vw\") pod \"control-plane-machine-set-operator-78cbb6b69f-jq747\" (UID: \"0c78cbc4-e705-490d-b453-9b1ec8a4ca07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.694974 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4857752f-13d0-4996-b112-6410097f9c28-service-ca-bundle\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695012 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89180294-9bf1-495b-9c50-cd89c01bcd21-serving-cert\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695032 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-config-volume\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695077 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bf6c344-5613-4019-bc8d-5a15ad459f46-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jv7wv\" (UID: \"4bf6c344-5613-4019-bc8d-5a15ad459f46\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695124 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl2p8\" (UniqueName: 
\"kubernetes.io/projected/aac91c41-1c11-475a-a50e-ee5183bd8219-kube-api-access-cl2p8\") pod \"multus-admission-controller-857f4d67dd-mwwmx\" (UID: \"aac91c41-1c11-475a-a50e-ee5183bd8219\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695150 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a81c4f43-65d8-4496-ab69-1537e2d01ba1-cert\") pod \"ingress-canary-9z4x7\" (UID: \"a81c4f43-65d8-4496-ab69-1537e2d01ba1\") " pod="openshift-ingress-canary/ingress-canary-9z4x7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695175 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgh6p\" (UniqueName: \"kubernetes.io/projected/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-kube-api-access-mgh6p\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695208 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695243 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f19ec5-9530-4336-b19c-fa51a40e7dea-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695277 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqc26\" (UniqueName: \"kubernetes.io/projected/addd2119-280d-4e65-94b2-82b42ccadb70-kube-api-access-dqc26\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695302 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5glp\" (UniqueName: \"kubernetes.io/projected/89180294-9bf1-495b-9c50-cd89c01bcd21-kube-api-access-j5glp\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695324 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7z55\" (UniqueName: \"kubernetes.io/projected/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-kube-api-access-x7z55\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695346 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/418efd12-f21a-4866-abb0-84bf3ea7d929-config\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: 
\"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695372 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/addd2119-280d-4e65-94b2-82b42ccadb70-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695412 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/89305210-320b-43d8-97ef-12a7809e9f73-signing-key\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695433 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rng7m\" (UniqueName: \"kubernetes.io/projected/4857752f-13d0-4996-b112-6410097f9c28-kube-api-access-rng7m\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695489 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/157285ab-7f36-4042-a4ed-b975c55d7f27-proxy-tls\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695512 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c78cbc4-e705-490d-b453-9b1ec8a4ca07-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-jq747\" (UID: \"0c78cbc4-e705-490d-b453-9b1ec8a4ca07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695538 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r779f\" (UniqueName: \"kubernetes.io/projected/89305210-320b-43d8-97ef-12a7809e9f73-kube-api-access-r779f\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695560 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msc9h\" (UniqueName: \"kubernetes.io/projected/4bf6c344-5613-4019-bc8d-5a15ad459f46-kube-api-access-msc9h\") pod \"package-server-manager-789f6589d5-jv7wv\" (UID: \"4bf6c344-5613-4019-bc8d-5a15ad459f46\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695584 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsd8w\" (UniqueName: \"kubernetes.io/projected/157285ab-7f36-4042-a4ed-b975c55d7f27-kube-api-access-xsd8w\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695607 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b02e5190-b670-4ec4-824f-a4f18cf79e33-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695629 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695658 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695705 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695733 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-certs\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695771 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjtwl\" (UniqueName: \"kubernetes.io/projected/b02e5190-b670-4ec4-824f-a4f18cf79e33-kube-api-access-qjtwl\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695796 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6051c8-2283-488a-8196-9a331c8ee74c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.695821 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-metrics-tls\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 
13:46:49.696578 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-metrics-tls\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696603 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-metrics-certs\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696640 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f8c7cf50-5af5-488b-a97b-c3452513d570-apiservice-cert\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696680 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq27r\" (UniqueName: \"kubernetes.io/projected/8269ba4f-674f-4886-aff2-1474500e1c38-kube-api-access-rq27r\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696701 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/89305210-320b-43d8-97ef-12a7809e9f73-signing-cabundle\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696725 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rzhk\" (UniqueName: \"kubernetes.io/projected/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-kube-api-access-9rzhk\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696749 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-node-bootstrap-token\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696777 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696810 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkr5n\" (UniqueName: \"kubernetes.io/projected/a81c4f43-65d8-4496-ab69-1537e2d01ba1-kube-api-access-lkr5n\") pod \"ingress-canary-9z4x7\" (UID: 
\"a81c4f43-65d8-4496-ab69-1537e2d01ba1\") " pod="openshift-ingress-canary/ingress-canary-9z4x7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696842 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6f19ec5-9530-4336-b19c-fa51a40e7dea-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696863 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6051c8-2283-488a-8196-9a331c8ee74c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696883 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-srv-cert\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696913 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8l2q\" (UniqueName: \"kubernetes.io/projected/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-kube-api-access-n8l2q\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696954 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-trusted-ca\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696979 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1d6051c8-2283-488a-8196-9a331c8ee74c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696984 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-mountpoint-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.697036 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dad5e909-9da1-44cf-9369-f0c771794006-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc 
kubenswrapper[4634]: I0929 13:46:49.697062 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f8c7cf50-5af5-488b-a97b-c3452513d570-tmpfs\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696509 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-stats-auth\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.697038 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-mountpoint-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.696273 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4857752f-13d0-4996-b112-6410097f9c28-service-ca-bundle\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.697715 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dad5e909-9da1-44cf-9369-f0c771794006-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.698178 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.198164166 +0000 UTC m=+140.766891925 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.698914 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/418efd12-f21a-4866-abb0-84bf3ea7d929-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: \"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699267 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-proxy-tls\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699376 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ttx7\" (UniqueName: \"kubernetes.io/projected/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-kube-api-access-6ttx7\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699466 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqq5v\" (UniqueName: \"kubernetes.io/projected/5653fd8a-02c3-4169-8197-d9b2b5cd9086-kube-api-access-dqq5v\") pod \"migrator-59844c95c7-2z2nc\" (UID: \"5653fd8a-02c3-4169-8197-d9b2b5cd9086\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699504 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d6051c8-2283-488a-8196-9a331c8ee74c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699526 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-default-certificate\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699548 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-csi-data-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 
13:46:49.699568 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-config-volume\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699591 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/418efd12-f21a-4866-abb0-84bf3ea7d929-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: \"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699721 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-metrics-tls\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699840 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b02e5190-b670-4ec4-824f-a4f18cf79e33-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.699846 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/8269ba4f-674f-4886-aff2-1474500e1c38-csi-data-dir\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.700221 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f8c7cf50-5af5-488b-a97b-c3452513d570-tmpfs\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.700622 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dad5e909-9da1-44cf-9369-f0c771794006-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.701603 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-trusted-ca\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.702971 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-default-certificate\") pod \"router-default-5444994796-x2lkr\" (UID: 
\"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.703599 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d6051c8-2283-488a-8196-9a331c8ee74c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.703883 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.704674 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4857752f-13d0-4996-b112-6410097f9c28-metrics-certs\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.707164 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.727182 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.747540 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.761173 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6f19ec5-9530-4336-b19c-fa51a40e7dea-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.766984 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.775288 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6f19ec5-9530-4336-b19c-fa51a40e7dea-config\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.787804 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.796958 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/418efd12-f21a-4866-abb0-84bf3ea7d929-config\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: 
\"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.800334 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.800556 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.300508611 +0000 UTC m=+140.869236420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.801399 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.801915 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.30188705 +0000 UTC m=+140.870614829 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.809073 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.827936 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.840605 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c78cbc4-e705-490d-b453-9b1ec8a4ca07-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-jq747\" (UID: \"0c78cbc4-e705-490d-b453-9b1ec8a4ca07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.848553 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.853544 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-images\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.867443 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.888316 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.902800 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.903533 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.403487245 +0000 UTC m=+140.972215034 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.903729 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:49 crc kubenswrapper[4634]: E0929 13:46:49.904306 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.404280817 +0000 UTC m=+140.973008596 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.906239 4634 request.go:700] Waited for 1.01634897s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmco-proxy-tls&limit=500&resourceVersion=0 Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.908385 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.913252 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-proxy-tls\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.927424 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.949873 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.962064 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-srv-cert\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.967915 4634 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.977270 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-secret-volume\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.981927 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.982484 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/addd2119-280d-4e65-94b2-82b42ccadb70-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:49 crc kubenswrapper[4634]: I0929 13:46:49.992141 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.004819 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.005224 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.505193313 +0000 UTC m=+141.073921102 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.008343 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.027823 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.048193 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.068845 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.081775 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.096749 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.107912 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.107990 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.108242 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.108605 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.608591298 +0000 UTC m=+141.177319047 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.128255 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.136890 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f8c7cf50-5af5-488b-a97b-c3452513d570-webhook-cert\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.140417 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f8c7cf50-5af5-488b-a97b-c3452513d570-apiservice-cert\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.149674 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.158988 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/4bf6c344-5613-4019-bc8d-5a15ad459f46-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-jv7wv\" (UID: \"4bf6c344-5613-4019-bc8d-5a15ad459f46\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.169365 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.178888 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/157285ab-7f36-4042-a4ed-b975c55d7f27-proxy-tls\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.188508 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.209171 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.209313 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.709279438 +0000 UTC m=+141.278007227 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.209375 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.210162 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.210605 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.710581694 +0000 UTC m=+141.279309473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.215369 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89180294-9bf1-495b-9c50-cd89c01bcd21-config\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.228194 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.248130 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.268770 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.272602 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89180294-9bf1-495b-9c50-cd89c01bcd21-serving-cert\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.289730 
4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.307600 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.311912 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.312070 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.812038795 +0000 UTC m=+141.380766574 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.312409 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.312810 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.812793676 +0000 UTC m=+141.381521465 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.330072 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.349201 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.358487 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-config-volume\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.368889 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.382443 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-metrics-tls\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.388837 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.390854 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-config-volume\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.407246 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.413980 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.414173 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.914132404 +0000 UTC m=+141.482860183 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.414639 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.415075 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:50.91505514 +0000 UTC m=+141.483782919 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.420667 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/addd2119-280d-4e65-94b2-82b42ccadb70-srv-cert\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.428713 4634 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.447860 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.468277 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.488015 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.508674 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.517602 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.01757504 +0000 UTC m=+141.586302829 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.517450 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.518552 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.519150 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.019026041 +0000 UTC m=+141.587753830 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.528202 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.541796 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/89305210-320b-43d8-97ef-12a7809e9f73-signing-key\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.547875 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.568792 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.580265 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/89305210-320b-43d8-97ef-12a7809e9f73-signing-cabundle\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.587941 4634 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.607925 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.616951 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/aac91c41-1c11-475a-a50e-ee5183bd8219-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-mwwmx\" (UID: \"aac91c41-1c11-475a-a50e-ee5183bd8219\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.621315 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.622006 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.121970752 +0000 UTC m=+141.690698541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.622477 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.622911 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.122889468 +0000 UTC m=+141.691617257 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.628547 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.640790 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-certs\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.648588 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.668636 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.681946 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-node-bootstrap-token\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.687748 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.697688 4634 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.697802 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a81c4f43-65d8-4496-ab69-1537e2d01ba1-cert podName:a81c4f43-65d8-4496-ab69-1537e2d01ba1 nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.19777365 +0000 UTC m=+141.766501439 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a81c4f43-65d8-4496-ab69-1537e2d01ba1-cert") pod "ingress-canary-9z4x7" (UID: "a81c4f43-65d8-4496-ab69-1537e2d01ba1") : failed to sync secret cache: timed out waiting for the condition Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.708440 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.724431 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.725607 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.225586303 +0000 UTC m=+141.794314062 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.734984 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.747250 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.795649 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd8zh\" (UniqueName: \"kubernetes.io/projected/c722e944-c116-40fe-a812-3f7dad194f1a-kube-api-access-fd8zh\") pod \"openshift-apiserver-operator-796bbdcf4f-7hccx\" (UID: \"c722e944-c116-40fe-a812-3f7dad194f1a\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.827774 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.828401 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.328377532 +0000 UTC m=+141.897105321 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.837162 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ff58d142-bb1b-4d79-be30-7d8c42951e4c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.859353 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4pth\" (UniqueName: \"kubernetes.io/projected/c0213c2c-abcc-42f6-830f-0dc1a0d2443f-kube-api-access-z4pth\") pod \"machine-approver-56656f9798-p5tq6\" (UID: \"c0213c2c-abcc-42f6-830f-0dc1a0d2443f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.874035 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.875206 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bfgc\" (UniqueName: \"kubernetes.io/projected/e396a94f-fce7-4877-b19c-29f13bf2ffea-kube-api-access-9bfgc\") pod \"dns-operator-744455d44c-4vmhr\" (UID: \"e396a94f-fce7-4877-b19c-29f13bf2ffea\") " pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.885722 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfszn\" (UniqueName: \"kubernetes.io/projected/5622c47f-366e-4649-86ad-76c631616d12-kube-api-access-qfszn\") pod \"cluster-samples-operator-665b6dd947-c2w5x\" (UID: \"5622c47f-366e-4649-86ad-76c631616d12\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.890437 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.901499 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnmcw\" (UniqueName: \"kubernetes.io/projected/6e8b69ca-c934-4baa-8957-62b6aff5babc-kube-api-access-gnmcw\") pod \"apiserver-7bbb656c7d-vbq68\" (UID: \"6e8b69ca-c934-4baa-8957-62b6aff5babc\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:50 crc kubenswrapper[4634]: W0929 13:46:50.926229 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0213c2c_abcc_42f6_830f_0dc1a0d2443f.slice/crio-552416d1f53b9ad9c4ccb8177782b2f95386d33fd5fa99a13c3e6ba5e4e4e447 WatchSource:0}: Error finding container 552416d1f53b9ad9c4ccb8177782b2f95386d33fd5fa99a13c3e6ba5e4e4e447: Status 404 returned error can't find the container with id 552416d1f53b9ad9c4ccb8177782b2f95386d33fd5fa99a13c3e6ba5e4e4e447 Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.926278 4634 request.go:700] Waited for 1.335628259s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/registry/token Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.928415 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:50 crc kubenswrapper[4634]: E0929 13:46:50.929226 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.429212646 +0000 UTC m=+141.997940395 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.935681 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6bsx\" (UniqueName: \"kubernetes.io/projected/5d677c42-66b0-47b9-904a-e2ef1049806e-kube-api-access-s6bsx\") pod \"etcd-operator-b45778765-7tbnt\" (UID: \"5d677c42-66b0-47b9-904a-e2ef1049806e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.943043 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-bound-sa-token\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.953292 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.962697 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq5vn\" (UniqueName: \"kubernetes.io/projected/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-kube-api-access-mq5vn\") pod \"controller-manager-879f6c89f-7mqhd\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:50 crc kubenswrapper[4634]: I0929 13:46:50.982873 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwds8\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-kube-api-access-qwds8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.006788 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.020466 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkgtw\" (UniqueName: \"kubernetes.io/projected/97f23fc8-1289-47c6-8bfc-49c7a338064d-kube-api-access-kkgtw\") pod \"apiserver-76f77b778f-rgfdh\" (UID: \"97f23fc8-1289-47c6-8bfc-49c7a338064d\") " pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.021355 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtqwc\" (UniqueName: \"kubernetes.io/projected/d19555df-603d-4db0-9ea2-9a473a55380d-kube-api-access-gtqwc\") pod \"console-operator-58897d9998-64xkr\" (UID: \"d19555df-603d-4db0-9ea2-9a473a55380d\") " pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.023578 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.032796 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.032959 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.033317 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.53330122 +0000 UTC m=+142.102028989 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.038162 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.048415 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.055465 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8k98\" (UniqueName: \"kubernetes.io/projected/cfca5304-d886-40b0-93ea-cb412ba053f9-kube-api-access-z8k98\") pod \"downloads-7954f5f757-kcrwt\" (UID: \"cfca5304-d886-40b0-93ea-cb412ba053f9\") " pod="openshift-console/downloads-7954f5f757-kcrwt" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.067910 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.080429 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5ml6\" (UniqueName: \"kubernetes.io/projected/4024807b-080e-4f06-a78e-021f46ec69af-kube-api-access-f5ml6\") pod \"openshift-config-operator-7777fb866f-md5b8\" (UID: \"4024807b-080e-4f06-a78e-021f46ec69af\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.081510 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpxlb\" (UniqueName: \"kubernetes.io/projected/ff58d142-bb1b-4d79-be30-7d8c42951e4c-kube-api-access-dpxlb\") pod \"cluster-image-registry-operator-dc59b4c8b-mj4rw\" (UID: \"ff58d142-bb1b-4d79-be30-7d8c42951e4c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.104940 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg584\" (UniqueName: \"kubernetes.io/projected/172d0968-1bd7-48d4-9bcd-62590bead86c-kube-api-access-tg584\") pod \"oauth-openshift-558db77b4-vvk4f\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") " pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.131069 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdsqg\" (UniqueName: \"kubernetes.io/projected/95f9d479-e9b9-4086-8792-83625bfaff6e-kube-api-access-vdsqg\") pod \"console-f9d7485db-5nvq7\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.133859 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " 
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.134450 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.634432311 +0000 UTC m=+142.203160060 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.148596 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-kcrwt"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.151126 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25nxh\" (UniqueName: \"kubernetes.io/projected/b1425abd-681d-4fb7-a09a-b2ebb679ffa7-kube-api-access-25nxh\") pod \"authentication-operator-69f744f599-pccc9\" (UID: \"b1425abd-681d-4fb7-a09a-b2ebb679ffa7\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.166274 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv2wn\" (UniqueName: \"kubernetes.io/projected/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-kube-api-access-wv2wn\") pod \"route-controller-manager-6576b87f9c-6srb6\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.184313 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5ssc\" (UniqueName: \"kubernetes.io/projected/dad5e909-9da1-44cf-9369-f0c771794006-kube-api-access-v5ssc\") pod \"kube-storage-version-migrator-operator-b67b599dd-vq2tm\" (UID: \"dad5e909-9da1-44cf-9369-f0c771794006\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.203684 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.209914 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5vl5\" (UniqueName: \"kubernetes.io/projected/5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf-kube-api-access-v5vl5\") pod \"dns-default-zjrr2\" (UID: \"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf\") " pod="openshift-dns/dns-default-zjrr2"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.234766 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpnp7\" (UniqueName: \"kubernetes.io/projected/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-kube-api-access-dpnp7\") pod \"marketplace-operator-79b997595-9v7l5\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.235450 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a81c4f43-65d8-4496-ab69-1537e2d01ba1-cert\") pod \"ingress-canary-9z4x7\" (UID: \"a81c4f43-65d8-4496-ab69-1537e2d01ba1\") " pod="openshift-ingress-canary/ingress-canary-9z4x7"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.235572 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.235862 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.735849361 +0000 UTC m=+142.304577110 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.242384 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a81c4f43-65d8-4496-ab69-1537e2d01ba1-cert\") pod \"ingress-canary-9z4x7\" (UID: \"a81c4f43-65d8-4496-ab69-1537e2d01ba1\") " pod="openshift-ingress-canary/ingress-canary-9z4x7"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.253303 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx2fg\" (UniqueName: \"kubernetes.io/projected/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-kube-api-access-gx2fg\") pod \"collect-profiles-29319225-lj7nd\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.263484 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.268219 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-4vmhr"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.273793 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txb9q\" (UniqueName: \"kubernetes.io/projected/f8c7cf50-5af5-488b-a97b-c3452513d570-kube-api-access-txb9q\") pod \"packageserver-d55dfcdfc-bhfql\" (UID: \"f8c7cf50-5af5-488b-a97b-c3452513d570\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.276260 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-zjrr2"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.284552 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.289778 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.291821 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.300776 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh6vw\" (UniqueName: \"kubernetes.io/projected/0c78cbc4-e705-490d-b453-9b1ec8a4ca07-kube-api-access-hh6vw\") pod \"control-plane-machine-set-operator-78cbb6b69f-jq747\" (UID: \"0c78cbc4-e705-490d-b453-9b1ec8a4ca07\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747"
Sep 29 13:46:51 crc kubenswrapper[4634]: W0929 13:46:51.301580 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode396a94f_fce7_4877_b19c_29f13bf2ffea.slice/crio-83b6930b16e2a256d12e8358b0282d07d2f2f3f5896d8860dfed46c62a354df1 WatchSource:0}: Error finding container 83b6930b16e2a256d12e8358b0282d07d2f2f3f5896d8860dfed46c62a354df1: Status 404 returned error can't find the container with id 83b6930b16e2a256d12e8358b0282d07d2f2f3f5896d8860dfed46c62a354df1
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.314476 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.315883 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.331029 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgh6p\" (UniqueName: \"kubernetes.io/projected/b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab-kube-api-access-mgh6p\") pod \"machine-config-operator-74547568cd-hblk6\" (UID: \"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.334738 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl2p8\" (UniqueName: \"kubernetes.io/projected/aac91c41-1c11-475a-a50e-ee5183bd8219-kube-api-access-cl2p8\") pod \"multus-admission-controller-857f4d67dd-mwwmx\" (UID: \"aac91c41-1c11-475a-a50e-ee5183bd8219\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.336916 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.337312 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.837247141 +0000 UTC m=+142.405974880 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.337402 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.337862 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.837854328 +0000 UTC m=+142.406582077 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.362443 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.363678 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rng7m\" (UniqueName: \"kubernetes.io/projected/4857752f-13d0-4996-b112-6410097f9c28-kube-api-access-rng7m\") pod \"router-default-5444994796-x2lkr\" (UID: \"4857752f-13d0-4996-b112-6410097f9c28\") " pod="openshift-ingress/router-default-5444994796-x2lkr"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.404872 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-7tbnt"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.405578 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqc26\" (UniqueName: \"kubernetes.io/projected/addd2119-280d-4e65-94b2-82b42ccadb70-kube-api-access-dqc26\") pod \"catalog-operator-68c6474976-4zvdk\" (UID: \"addd2119-280d-4e65-94b2-82b42ccadb70\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.407272 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5glp\" (UniqueName: \"kubernetes.io/projected/89180294-9bf1-495b-9c50-cd89c01bcd21-kube-api-access-j5glp\") pod \"service-ca-operator-777779d784-v5cbt\" (UID: \"89180294-9bf1-495b-9c50-cd89c01bcd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.411669 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-x2lkr"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.414141 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-5nvq7"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.422618 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.438465 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.438899 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:51.938884247 +0000 UTC m=+142.507611996 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.444467 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7z55\" (UniqueName: \"kubernetes.io/projected/a1e6f156-7ac5-4f39-9b41-6fd68e0396b2-kube-api-access-x7z55\") pod \"machine-config-server-8h72q\" (UID: \"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2\") " pod="openshift-machine-config-operator/machine-config-server-8h72q"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.444599 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.460470 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r779f\" (UniqueName: \"kubernetes.io/projected/89305210-320b-43d8-97ef-12a7809e9f73-kube-api-access-r779f\") pod \"service-ca-9c57cc56f-m4gqz\" (UID: \"89305210-320b-43d8-97ef-12a7809e9f73\") " pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.470060 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.473820 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsd8w\" (UniqueName: \"kubernetes.io/projected/157285ab-7f36-4042-a4ed-b975c55d7f27-kube-api-access-xsd8w\") pod \"machine-config-controller-84d6567774-shmnq\" (UID: \"157285ab-7f36-4042-a4ed-b975c55d7f27\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.478317 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.494066 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.502411 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.511269 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msc9h\" (UniqueName: \"kubernetes.io/projected/4bf6c344-5613-4019-bc8d-5a15ad459f46-kube-api-access-msc9h\") pod \"package-server-manager-789f6589d5-jv7wv\" (UID: \"4bf6c344-5613-4019-bc8d-5a15ad459f46\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.512143 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjtwl\" (UniqueName: \"kubernetes.io/projected/b02e5190-b670-4ec4-824f-a4f18cf79e33-kube-api-access-qjtwl\") pod \"machine-api-operator-5694c8668f-27cm7\" (UID: \"b02e5190-b670-4ec4-824f-a4f18cf79e33\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.519592 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.528245 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.528572 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mqhd"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.536862 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkr5n\" (UniqueName: \"kubernetes.io/projected/a81c4f43-65d8-4496-ab69-1537e2d01ba1-kube-api-access-lkr5n\") pod \"ingress-canary-9z4x7\" (UID: \"a81c4f43-65d8-4496-ab69-1537e2d01ba1\") " pod="openshift-ingress-canary/ingress-canary-9z4x7"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.540590 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.541005 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.040993436 +0000 UTC m=+142.609721185 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.554964 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rgfdh"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.562210 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6f19ec5-9530-4336-b19c-fa51a40e7dea-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-nbc4c\" (UID: \"f6f19ec5-9530-4336-b19c-fa51a40e7dea\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.565476 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.585777 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rzhk\" (UniqueName: \"kubernetes.io/projected/f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be-kube-api-access-9rzhk\") pod \"ingress-operator-5b745b69d9-gpgzb\" (UID: \"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.600673 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.601268 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq27r\" (UniqueName: \"kubernetes.io/projected/8269ba4f-674f-4886-aff2-1474500e1c38-kube-api-access-rq27r\") pod \"csi-hostpathplugin-7hz92\" (UID: \"8269ba4f-674f-4886-aff2-1474500e1c38\") " pod="hostpath-provisioner/csi-hostpathplugin-7hz92"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.601614 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.602882 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ttx7\" (UniqueName: \"kubernetes.io/projected/e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2-kube-api-access-6ttx7\") pod \"openshift-controller-manager-operator-756b6f6bc6-42nsw\" (UID: \"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.613667 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.624917 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-8h72q"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.631908 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-9z4x7"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.632795 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.640549 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/418efd12-f21a-4866-abb0-84bf3ea7d929-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8sstx\" (UID: \"418efd12-f21a-4866-abb0-84bf3ea7d929\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.641207 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.641365 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.141343986 +0000 UTC m=+142.710071735 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.641517 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.641798 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.141785879 +0000 UTC m=+142.710513628 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.647475 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqq5v\" (UniqueName: \"kubernetes.io/projected/5653fd8a-02c3-4169-8197-d9b2b5cd9086-kube-api-access-dqq5v\") pod \"migrator-59844c95c7-2z2nc\" (UID: \"5653fd8a-02c3-4169-8197-d9b2b5cd9086\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.680164 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.683934 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8l2q\" (UniqueName: \"kubernetes.io/projected/30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3-kube-api-access-n8l2q\") pod \"olm-operator-6b444d44fb-gwwls\" (UID: \"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.684297 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1d6051c8-2283-488a-8196-9a331c8ee74c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-s52wd\" (UID: \"1d6051c8-2283-488a-8196-9a331c8ee74c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.701480 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.717406 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.730138 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.737540 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.742562 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.742957 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.242942142 +0000 UTC m=+142.811669891 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.752174 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.760336 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.788363 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.805891 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" event={"ID":"5d677c42-66b0-47b9-904a-e2ef1049806e","Type":"ContainerStarted","Data":"cb51332a6a7ef3d754f65a9e32901604e77fff16d5aee30a982ebbb6a7129498"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.811239 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.812339 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" event={"ID":"e396a94f-fce7-4877-b19c-29f13bf2ffea","Type":"ContainerStarted","Data":"83b6930b16e2a256d12e8358b0282d07d2f2f3f5896d8860dfed46c62a354df1"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.818395 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" event={"ID":"a83998e9-b33c-4d62-b1be-c2b8b5e6982d","Type":"ContainerStarted","Data":"65ec871351700ce9e053e5b1c371115dcf89ff825bb9001c6dab7f2ada6956ee"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.820935 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" event={"ID":"97f23fc8-1289-47c6-8bfc-49c7a338064d","Type":"ContainerStarted","Data":"b8376cf9c8753366f09e8da047eca446e4422850466331b5c20c5de7d27d0cd8"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.821957 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" event={"ID":"c722e944-c116-40fe-a812-3f7dad194f1a","Type":"ContainerStarted","Data":"22948c21288910b0c65f8b547c3497d0db7da917daae771d00312c6e094ce6a8"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.822830 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" event={"ID":"6e8b69ca-c934-4baa-8957-62b6aff5babc","Type":"ContainerStarted","Data":"e90a0ba2499c9571c270187b9bde1542e235077f0ac0d696554478e6744ac2ae"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.826837 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" event={"ID":"c0213c2c-abcc-42f6-830f-0dc1a0d2443f","Type":"ContainerStarted","Data":"ae13a6cf28d7ad558bfa4aa9abecbb43f5a08055971daec4fb762ec813345924"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.826902 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" event={"ID":"c0213c2c-abcc-42f6-830f-0dc1a0d2443f","Type":"ContainerStarted","Data":"552416d1f53b9ad9c4ccb8177782b2f95386d33fd5fa99a13c3e6ba5e4e4e447"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.829460 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" event={"ID":"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a","Type":"ContainerStarted","Data":"8565cc7407f9b6b4fb5ed0677b2fb42066f6df09a7975f16f2f80ad35b233ae3"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.830385 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" event={"ID":"5622c47f-366e-4649-86ad-76c631616d12","Type":"ContainerStarted","Data":"636f34a45578b1ae17f8f87c8569fbef24ad22c815a354e9cc8688437852f3ca"}
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.844220 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.844904 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.344892256 +0000 UTC m=+142.913620005 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.854556 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-7hz92"
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.887241 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-64xkr"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.899901 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-kcrwt"]
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.947015 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.947252 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.447228711 +0000 UTC m=+143.015956460 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:51 crc kubenswrapper[4634]: I0929 13:46:51.947915 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:51 crc kubenswrapper[4634]: E0929 13:46:51.948459 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.448447356 +0000 UTC m=+143.017175105 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.010318 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.035868 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vvk4f"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.037825 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-zjrr2"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.039429 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.055981 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.056487 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.556468289 +0000 UTC m=+143.125196048 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.160862 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.161486 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.661475129 +0000 UTC m=+143.230202878 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.177023 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-md5b8"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.200844 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.237595 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-pccc9"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.262417 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.263368 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.76292427 +0000 UTC m=+143.331652019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.329128 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.368206 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.369173 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.869124033 +0000 UTC m=+143.437851782 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.376861 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-mwwmx"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.450206 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.461456 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.469825 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.490287 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk"]
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.491509 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:52.991489195 +0000 UTC m=+143.560216934 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: W0929 13:46:52.558823 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb09096e0_66ea_4ec7_b5cc_d7bdae86b9ab.slice/crio-8f6cff3dc77d14b72cab71fd175a7b1dba8962dd4ebe0682ad4364dc42247d5d WatchSource:0}: Error finding container 8f6cff3dc77d14b72cab71fd175a7b1dba8962dd4ebe0682ad4364dc42247d5d: Status 404 returned error can't find the container with id 8f6cff3dc77d14b72cab71fd175a7b1dba8962dd4ebe0682ad4364dc42247d5d
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.594052 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.594373 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.094362655 +0000 UTC m=+143.663090404 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: W0929 13:46:52.600581 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaac91c41_1c11_475a_a50e_ee5183bd8219.slice/crio-b09d4a6fc4189163160e09f56bd6ee0628fa6804cf28c5b86a22aac6f3c09e42 WatchSource:0}: Error finding container b09d4a6fc4189163160e09f56bd6ee0628fa6804cf28c5b86a22aac6f3c09e42: Status 404 returned error can't find the container with id b09d4a6fc4189163160e09f56bd6ee0628fa6804cf28c5b86a22aac6f3c09e42
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.690724 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.698723 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.699019 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.199002965 +0000 UTC m=+143.767730704 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.726442 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.731738 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-5nvq7"]
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.800619 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.800964 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.30094958 +0000 UTC m=+143.869677329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.844916 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-64xkr" event={"ID":"d19555df-603d-4db0-9ea2-9a473a55380d","Type":"ContainerStarted","Data":"c5446274bfb677234b0b5d51784bacd8beb5030a0fd31d33f62a5669d832ef8d"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.866880 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" event={"ID":"b1425abd-681d-4fb7-a09a-b2ebb679ffa7","Type":"ContainerStarted","Data":"fc09fad36eaec2e2a381c1f29aeba34d7f62c0bfdb4e91f3c2455cdf3bf89fd3"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.878523 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" event={"ID":"5622c47f-366e-4649-86ad-76c631616d12","Type":"ContainerStarted","Data":"077850ff9c5baad46a5fdf4253d597be673bd1ffb176fe97cb8ec799b076c7cb"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.902340 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:52 crc kubenswrapper[4634]: E0929 13:46:52.902891 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.402874954 +0000 UTC m=+143.971602703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.915234 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" event={"ID":"172d0968-1bd7-48d4-9bcd-62590bead86c","Type":"ContainerStarted","Data":"4a8a619db538d8b1206ec95785d30be1ac210a419d3ba0134e752dba0d905288"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.921120 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-x2lkr" event={"ID":"4857752f-13d0-4996-b112-6410097f9c28","Type":"ContainerStarted","Data":"47d1a69ed10ef28aa956c65dd3844d59ff85b40d3629c8a13ade8326c1cda7bb"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.922420 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" event={"ID":"0c78cbc4-e705-490d-b453-9b1ec8a4ca07","Type":"ContainerStarted","Data":"baae4d0e55e37c135bb4f2a35cb38edcdd38ef4ea4fda31c7f518fe9e5f3cc78"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.925858 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-kcrwt" event={"ID":"cfca5304-d886-40b0-93ea-cb412ba053f9","Type":"ContainerStarted","Data":"47a3643e86f216be70f3caf5ba0691a08e0df5f125620c75fa6ff94158c1ddde"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.965368 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" event={"ID":"a83998e9-b33c-4d62-b1be-c2b8b5e6982d","Type":"ContainerStarted","Data":"2bb07e29ba5627b9cc5e011dae0e58a8a8cba13ca9609481f67f4b81fd35f909"}
Sep 29 13:46:52 crc kubenswrapper[4634]: I0929 13:46:52.974071 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd"
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.004314 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.004652 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.504640753 +0000 UTC m=+144.073368492 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.043391 4634 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7mqhd container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body=
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.043440 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" podUID="a83998e9-b33c-4d62-b1be-c2b8b5e6982d" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused"
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.076676 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" event={"ID":"c722e944-c116-40fe-a812-3f7dad194f1a","Type":"ContainerStarted","Data":"ee3e47f1cab2b43ae5bd4f92f9ddf56067df9042d7769b191c027e2ed5179ab0"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.107440 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" event={"ID":"e396a94f-fce7-4877-b19c-29f13bf2ffea","Type":"ContainerStarted","Data":"d00e004f0c988451f86b88765f38c4a13f9c8f3674dd18a6974d48f74bd3c581"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.108154 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.109155 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.609142069 +0000 UTC m=+144.177869818 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.109878 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9v7l5"]
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.124653 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq"]
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.144992 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" event={"ID":"dad5e909-9da1-44cf-9369-f0c771794006","Type":"ContainerStarted","Data":"ce135e70054ee8924d1a36d8864b30fd5e615164c723bbeaf927ee94dacdae6d"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.148004 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" event={"ID":"addd2119-280d-4e65-94b2-82b42ccadb70","Type":"ContainerStarted","Data":"6941a7d937bd742e2dca6e6dc3e492ed68252831c43cc4d52a9864122593822a"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.168166 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c"]
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.168212 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-9z4x7"]
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.196315 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" event={"ID":"aac91c41-1c11-475a-a50e-ee5183bd8219","Type":"ContainerStarted","Data":"b09d4a6fc4189163160e09f56bd6ee0628fa6804cf28c5b86a22aac6f3c09e42"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.204121 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" event={"ID":"4024807b-080e-4f06-a78e-021f46ec69af","Type":"ContainerStarted","Data":"81d54f5be1cac89e4c704221545f20e1c57109c9f3ed85957ae8cb20498f55af"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.209114 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.210104 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.710077425 +0000 UTC m=+144.278805174 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.213413 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" event={"ID":"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da","Type":"ContainerStarted","Data":"27533fedb174b63fb84357702cafd910b6fa99ce4e1c500ce76d066921def8c6"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.216213 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" event={"ID":"ff58d142-bb1b-4d79-be30-7d8c42951e4c","Type":"ContainerStarted","Data":"caa71441f6088db1b22555c10d7d476a96f24d2974dd419815604924217ceada"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.217426 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" event={"ID":"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab","Type":"ContainerStarted","Data":"8f6cff3dc77d14b72cab71fd175a7b1dba8962dd4ebe0682ad4364dc42247d5d"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.218683 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-8h72q" event={"ID":"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2","Type":"ContainerStarted","Data":"6913cb32fa9d55a25fb5a55cb87202830401cba1c23302db16dea95f5227ffc7"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.220286 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-zjrr2" event={"ID":"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf","Type":"ContainerStarted","Data":"e9d0452820f254c24f3723375f16cd9b09579ae3d76b5594bff7bf06aca1cae3"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.234572 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" event={"ID":"f8c7cf50-5af5-488b-a97b-c3452513d570","Type":"ContainerStarted","Data":"c700e50072308abaf9ec5a062629e006a8d4935588ddfc4ab66f516c8f929008"}
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.261911 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw"]
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.281777 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-m4gqz"]
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.311527 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.311881 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.811866576 +0000 UTC m=+144.380594325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.387916 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" podStartSLOduration=120.38789862 podStartE2EDuration="2m0.38789862s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:53.382627564 +0000 UTC m=+143.951355313" watchObservedRunningTime="2025-09-29 13:46:53.38789862 +0000 UTC m=+143.956626369"
Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.416898 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:53 crc kubenswrapper[4634]: W0929 13:46:53.417457 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89305210_320b_43d8_97ef_12a7809e9f73.slice/crio-ebfc61f5334c7c9fdea3f25eaff9dc15b5861fef5a262057fc9d6437bcc655c8 WatchSource:0}: Error finding container ebfc61f5334c7c9fdea3f25eaff9dc15b5861fef5a262057fc9d6437bcc655c8: Status 404 returned error can't find the container with id ebfc61f5334c7c9fdea3f25eaff9dc15b5861fef5a262057fc9d6437bcc655c8
Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.417645 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:53.917629286 +0000 UTC m=+144.486357035 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.444759 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-7hz92"] Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.459566 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-27cm7"] Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.514632 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls"] Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.520131 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.520359 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.020318672 +0000 UTC m=+144.589046421 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.520970 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.521347 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.0213388 +0000 UTC m=+144.590066549 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.597426 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv"] Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.619874 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb"] Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.621785 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.621953 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.121931647 +0000 UTC m=+144.690659396 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.622263 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.624161 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.124150409 +0000 UTC m=+144.692878158 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:53 crc kubenswrapper[4634]: W0929 13:46:53.630889 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb02e5190_b670_4ec4_824f_a4f18cf79e33.slice/crio-a4916aae5c82e536ae962c050556a84748e5f274feeae9facb8f432f2af498b6 WatchSource:0}: Error finding container a4916aae5c82e536ae962c050556a84748e5f274feeae9facb8f432f2af498b6: Status 404 returned error can't find the container with id a4916aae5c82e536ae962c050556a84748e5f274feeae9facb8f432f2af498b6 Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.635301 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-7hccx" podStartSLOduration=120.635277408 podStartE2EDuration="2m0.635277408s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:53.622653738 +0000 UTC m=+144.191381487" watchObservedRunningTime="2025-09-29 13:46:53.635277408 +0000 UTC m=+144.204005157" Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.645294 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc"] Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.667150 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx"] Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.725729 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.726557 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.226530256 +0000 UTC m=+144.795258005 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:53 crc kubenswrapper[4634]: W0929 13:46:53.792440 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1ca81ca_ca2a_4f83_9a40_ef8efdc1c7be.slice/crio-f2e0cec6538f2602dec8b9f31d5c657f7a99ebb88e12badeeb1ac5b7eafbafaf WatchSource:0}: Error finding container f2e0cec6538f2602dec8b9f31d5c657f7a99ebb88e12badeeb1ac5b7eafbafaf: Status 404 returned error can't find the container with id f2e0cec6538f2602dec8b9f31d5c657f7a99ebb88e12badeeb1ac5b7eafbafaf Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.827329 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.827716 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.327702529 +0000 UTC m=+144.896430278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:53 crc kubenswrapper[4634]: I0929 13:46:53.928378 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:53 crc kubenswrapper[4634]: E0929 13:46:53.928774 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.428760769 +0000 UTC m=+144.997488518 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.030006 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.030426 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.530410615 +0000 UTC m=+145.099138354 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.131567 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.131820 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.631801134 +0000 UTC m=+145.200528883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.232680 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.234809 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.733136052 +0000 UTC m=+145.301863801 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.246253 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc" event={"ID":"5653fd8a-02c3-4169-8197-d9b2b5cd9086","Type":"ContainerStarted","Data":"c844a327a2c21bcd91c92cd3cd2138e339d9fb74bee5e7e6b2cd82c30c352cee"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.248237 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" event={"ID":"5d677c42-66b0-47b9-904a-e2ef1049806e","Type":"ContainerStarted","Data":"ed9ae6883caa5939280a6610e9961dd627097345fd2a57dc0d299dab7a5671e7"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.255800 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" event={"ID":"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a","Type":"ContainerStarted","Data":"4742419e87e203babd686c45b7af1120ef78475ae06837a4017280f8fc26ff81"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.255955 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.258136 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" event={"ID":"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70","Type":"ContainerStarted","Data":"f5a0bcde16799485fdf71d6e08f860f21c2ed253e5bc328dd0ba2c71f192a950"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.260401 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7hz92" 
event={"ID":"8269ba4f-674f-4886-aff2-1474500e1c38","Type":"ContainerStarted","Data":"90c52d41428323f101f386da253b7f5a29524fbb14aba1aa5da82a621594d0e8"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.268920 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-7tbnt" podStartSLOduration=121.268902396 podStartE2EDuration="2m1.268902396s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.268644669 +0000 UTC m=+144.837372418" watchObservedRunningTime="2025-09-29 13:46:54.268902396 +0000 UTC m=+144.837630145" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.269040 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" event={"ID":"172d0968-1bd7-48d4-9bcd-62590bead86c","Type":"ContainerStarted","Data":"d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.269859 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.271824 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" event={"ID":"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2","Type":"ContainerStarted","Data":"f98e02865e3ac7508f58427ce42c01f1306f70b64c2f7db5f60c895d08246deb"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.292415 4634 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-vvk4f container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.27:6443/healthz\": dial tcp 10.217.0.27:6443: connect: connection refused" start-of-body= Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.292476 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" podUID="172d0968-1bd7-48d4-9bcd-62590bead86c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.27:6443/healthz\": dial tcp 10.217.0.27:6443: connect: connection refused" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.312495 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" podStartSLOduration=120.312456698 podStartE2EDuration="2m0.312456698s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.307837689 +0000 UTC m=+144.876565438" watchObservedRunningTime="2025-09-29 13:46:54.312456698 +0000 UTC m=+144.881184447" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.314557 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-8h72q" event={"ID":"a1e6f156-7ac5-4f39-9b41-6fd68e0396b2","Type":"ContainerStarted","Data":"fd8e1433811d5f9924676d1b51bbef1b74a45a3d7026ec175f97b8b868bd7edb"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.334533 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.335755 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.835735055 +0000 UTC m=+145.404462804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.339501 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" event={"ID":"c0213c2c-abcc-42f6-830f-0dc1a0d2443f","Type":"ContainerStarted","Data":"21378bbc77f8dfd4f8aab7af361e024936ff7c9279fa60ba15b161fc7e13a327"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.358711 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" podStartSLOduration=121.358694283 podStartE2EDuration="2m1.358694283s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.356454451 +0000 UTC m=+144.925182200" watchObservedRunningTime="2025-09-29 13:46:54.358694283 +0000 UTC m=+144.927422032" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.382873 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5nvq7" event={"ID":"95f9d479-e9b9-4086-8792-83625bfaff6e","Type":"ContainerStarted","Data":"44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.382918 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5nvq7" event={"ID":"95f9d479-e9b9-4086-8792-83625bfaff6e","Type":"ContainerStarted","Data":"e49358bc5e094869639e7fc5ffb4945730affc01db25fc24793dd6b87978780f"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.390982 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-8h72q" podStartSLOduration=6.390966631 podStartE2EDuration="6.390966631s" podCreationTimestamp="2025-09-29 13:46:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.390191959 +0000 UTC m=+144.958919708" watchObservedRunningTime="2025-09-29 13:46:54.390966631 +0000 UTC m=+144.959694380" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.392963 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" event={"ID":"0c78cbc4-e705-490d-b453-9b1ec8a4ca07","Type":"ContainerStarted","Data":"2cd7056f355034e7d6d71f4eeeeab19f5d55b6d603bcdf1c2a9a7eefebe55a34"} Sep 
29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.420363 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-p5tq6" podStartSLOduration=121.420347817 podStartE2EDuration="2m1.420347817s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.419457163 +0000 UTC m=+144.988184912" watchObservedRunningTime="2025-09-29 13:46:54.420347817 +0000 UTC m=+144.989075566" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.430166 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" event={"ID":"e396a94f-fce7-4877-b19c-29f13bf2ffea","Type":"ContainerStarted","Data":"bb69b23c9bc31c813655fa6315f31c8a255c30d59ffec90ce959475362050854"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.435822 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.437486 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-jq747" podStartSLOduration=121.437470883 podStartE2EDuration="2m1.437470883s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.434991464 +0000 UTC m=+145.003719213" watchObservedRunningTime="2025-09-29 13:46:54.437470883 +0000 UTC m=+145.006198632" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.438473 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-x2lkr" event={"ID":"4857752f-13d0-4996-b112-6410097f9c28","Type":"ContainerStarted","Data":"5109edeb422f483914366603e6cee558e2e2f12a0da828d75464c55f8f9852ba"} Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.445161 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:54.945147877 +0000 UTC m=+145.513875626 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.461687 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-5nvq7" podStartSLOduration=121.461672447 podStartE2EDuration="2m1.461672447s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.46106688 +0000 UTC m=+145.029794629" watchObservedRunningTime="2025-09-29 13:46:54.461672447 +0000 UTC m=+145.030400196" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.490430 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" event={"ID":"89180294-9bf1-495b-9c50-cd89c01bcd21","Type":"ContainerStarted","Data":"c69551049d0affc08640eadb0296c0b2a9066161f69ccbfb1dd54f085bed02ba"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.490480 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" event={"ID":"89180294-9bf1-495b-9c50-cd89c01bcd21","Type":"ContainerStarted","Data":"728d7b8f98c9e2e9cd35004f4bec059acb9d196f361ddb9cd9f02a0caf860177"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.490790 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-4vmhr" podStartSLOduration=121.490775255 podStartE2EDuration="2m1.490775255s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.490537669 +0000 UTC m=+145.059265418" watchObservedRunningTime="2025-09-29 13:46:54.490775255 +0000 UTC m=+145.059503004" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.504783 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" event={"ID":"ff58d142-bb1b-4d79-be30-7d8c42951e4c","Type":"ContainerStarted","Data":"df18d8f83e4466e0fd5e9309d3ed2ef70c6eafa9982b1952c4bbc24b79ebdfbe"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.523375 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" event={"ID":"157285ab-7f36-4042-a4ed-b975c55d7f27","Type":"ContainerStarted","Data":"85870ca3a11a6ef0a9ec86ef76698e18676632abdaa128bb244dbd00738c64af"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.523417 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" event={"ID":"157285ab-7f36-4042-a4ed-b975c55d7f27","Type":"ContainerStarted","Data":"8a6f7d2b4f8abd5c2d1a091c69c848595c8c7277a932d6f18763fb86b247bca2"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.530136 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-x2lkr" 
podStartSLOduration=121.530114729 podStartE2EDuration="2m1.530114729s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.522949511 +0000 UTC m=+145.091677260" watchObservedRunningTime="2025-09-29 13:46:54.530114729 +0000 UTC m=+145.098842478" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.536584 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.536862 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.036826526 +0000 UTC m=+145.605554275 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.537117 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.538894 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.038880464 +0000 UTC m=+145.607608203 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.544272 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-v5cbt" podStartSLOduration=120.544256132 podStartE2EDuration="2m0.544256132s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.542995988 +0000 UTC m=+145.111723737" watchObservedRunningTime="2025-09-29 13:46:54.544256132 +0000 UTC m=+145.112983881" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.590401 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" event={"ID":"b02e5190-b670-4ec4-824f-a4f18cf79e33","Type":"ContainerStarted","Data":"a4916aae5c82e536ae962c050556a84748e5f274feeae9facb8f432f2af498b6"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.595786 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mj4rw" podStartSLOduration=121.595773015 podStartE2EDuration="2m1.595773015s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.593656076 +0000 UTC m=+145.162383825" watchObservedRunningTime="2025-09-29 13:46:54.595773015 +0000 UTC m=+145.164500764" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.641187 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.642444 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.142401532 +0000 UTC m=+145.711129281 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.644772 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-kcrwt" event={"ID":"cfca5304-d886-40b0-93ea-cb412ba053f9","Type":"ContainerStarted","Data":"f8fae3d39446cdcfb2504012cf39513d39b6b290242c53041e00c2a4913e2624"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.644996 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-kcrwt" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.656919 4634 patch_prober.go:28] interesting pod/downloads-7954f5f757-kcrwt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.657154 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kcrwt" podUID="cfca5304-d886-40b0-93ea-cb412ba053f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.686684 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" event={"ID":"dad5e909-9da1-44cf-9369-f0c771794006","Type":"ContainerStarted","Data":"c847a4b620094223d992f55e4507a9d5b2b1ae97cbe7c445c9b44b07a9e80c4e"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.688407 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-kcrwt" podStartSLOduration=121.688394921 podStartE2EDuration="2m1.688394921s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.68727962 +0000 UTC m=+145.256007369" watchObservedRunningTime="2025-09-29 13:46:54.688394921 +0000 UTC m=+145.257122670" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.702795 4634 generic.go:334] "Generic (PLEG): container finished" podID="6e8b69ca-c934-4baa-8957-62b6aff5babc" containerID="592753961ebd56b5d43306f721b108c7bbed3710f664b7ac4eaca696d59c2c73" exitCode=0 Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.702865 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" event={"ID":"6e8b69ca-c934-4baa-8957-62b6aff5babc","Type":"ContainerDied","Data":"592753961ebd56b5d43306f721b108c7bbed3710f664b7ac4eaca696d59c2c73"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.711533 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" event={"ID":"addd2119-280d-4e65-94b2-82b42ccadb70","Type":"ContainerStarted","Data":"b39e2f5ed8ad6060791b1b8c6291883d648584f4f5a866328e2cd51a4cda633c"} Sep 29 13:46:54 crc 
kubenswrapper[4634]: I0929 13:46:54.712320 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.714208 4634 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-4zvdk container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.714241 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" podUID="addd2119-280d-4e65-94b2-82b42ccadb70" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.728221 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vq2tm" podStartSLOduration=121.728206847 podStartE2EDuration="2m1.728206847s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.727025435 +0000 UTC m=+145.295753184" watchObservedRunningTime="2025-09-29 13:46:54.728206847 +0000 UTC m=+145.296934596" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.733170 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" event={"ID":"f6f19ec5-9530-4336-b19c-fa51a40e7dea","Type":"ContainerStarted","Data":"dd364e2b8e658454344e8f2a1728bd9def23f17e2b057923e4d6ad4cfa27ce84"} Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.744202 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.745436 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.245422416 +0000 UTC m=+145.814150165 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.810589 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk" podStartSLOduration=120.810574648 podStartE2EDuration="2m0.810574648s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:54.761622817 +0000 UTC m=+145.330350566" watchObservedRunningTime="2025-09-29 13:46:54.810574648 +0000 UTC m=+145.379302397" Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.845174 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.845904 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.345877359 +0000 UTC m=+145.914605108 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.848014 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.848839 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.348826191 +0000 UTC m=+145.917553930 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.850689 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" event={"ID":"89305210-320b-43d8-97ef-12a7809e9f73","Type":"ContainerStarted","Data":"ebfc61f5334c7c9fdea3f25eaff9dc15b5861fef5a262057fc9d6437bcc655c8"}
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.874511 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" event={"ID":"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3","Type":"ContainerStarted","Data":"f3173bd5ed261801e0d9c3d28726db1d296383d3e438ef1cbfd7114aa8c2c908"}
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.875544 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls"
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.895478 4634 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-gwwls container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body=
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.895812 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" podUID="30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused"
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.951571 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9z4x7" event={"ID":"a81c4f43-65d8-4496-ab69-1537e2d01ba1","Type":"ContainerStarted","Data":"eb2c84abe27c951e3cdb2b4417a1d81d7890146f5c0d077cfc3657188f5671bf"}
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.952065 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:54 crc kubenswrapper[4634]: E0929 13:46:54.953024 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.453010408 +0000 UTC m=+146.021738157 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.972449 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-64xkr" event={"ID":"d19555df-603d-4db0-9ea2-9a473a55380d","Type":"ContainerStarted","Data":"67091f5c91f3f2c67758aab389bc3eeb57dee54db8a58fafabd0878573cf26a9"}
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.972481 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-64xkr"
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.987588 4634 patch_prober.go:28] interesting pod/console-operator-58897d9998-64xkr container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body=
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.987627 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-64xkr" podUID="d19555df-603d-4db0-9ea2-9a473a55380d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused"
Sep 29 13:46:54 crc kubenswrapper[4634]: I0929 13:46:54.995698 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" event={"ID":"5622c47f-366e-4649-86ad-76c631616d12","Type":"ContainerStarted","Data":"a7ac4a53b3ec33a8e0395fe03100865fa2d8d225e472f88c8ec09321d6d0f1fc"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.021888 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-zjrr2" event={"ID":"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf","Type":"ContainerStarted","Data":"88e13c9cbce1dcdadae8db5d1c990ceba0310767f0a46d0068507c9508d52463"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.022416 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-zjrr2"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.026963 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" event={"ID":"f8c7cf50-5af5-488b-a97b-c3452513d570","Type":"ContainerStarted","Data":"e687721de15f05c58406d3888c2da95baa605f7c2009142b22a4e6daf2522a11"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.027464 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.039631 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" event={"ID":"aac91c41-1c11-475a-a50e-ee5183bd8219","Type":"ContainerStarted","Data":"ff3b8e2bfb9fb8922ca4634927e16605c123909eb611097baf362fea00cf012c"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.040597 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" event={"ID":"4bf6c344-5613-4019-bc8d-5a15ad459f46","Type":"ContainerStarted","Data":"1a953be29073f777c05bd580706eb4ea90a55e971ce1a319f8dd8af97f8ade2b"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.042574 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" event={"ID":"418efd12-f21a-4866-abb0-84bf3ea7d929","Type":"ContainerStarted","Data":"aa08170f6ee40e8da2e3545e8746dca9347170539c57967bea02c5943842b4b4"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.044276 4634 generic.go:334] "Generic (PLEG): container finished" podID="4024807b-080e-4f06-a78e-021f46ec69af" containerID="7ff6d4e452a8bbea063213fae744e19cbcda77ddb842b14a044723aa1d1cef4c" exitCode=0
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.044313 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" event={"ID":"4024807b-080e-4f06-a78e-021f46ec69af","Type":"ContainerDied","Data":"7ff6d4e452a8bbea063213fae744e19cbcda77ddb842b14a044723aa1d1cef4c"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.052097 4634 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-bhfql container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused" start-of-body=
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.052158 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" podUID="f8c7cf50-5af5-488b-a97b-c3452513d570" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": dial tcp 10.217.0.34:5443: connect: connection refused"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.061061 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.063934 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.563917902 +0000 UTC m=+146.132645651 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.094776 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" event={"ID":"b1425abd-681d-4fb7-a09a-b2ebb679ffa7","Type":"ContainerStarted","Data":"7a91278f588c003058481ee7fe436b04a00c15c092f7da807630f6b91af83bae"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.120179 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" event={"ID":"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be","Type":"ContainerStarted","Data":"f2e0cec6538f2602dec8b9f31d5c657f7a99ebb88e12badeeb1ac5b7eafbafaf"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.122914 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" podStartSLOduration=121.122896492 podStartE2EDuration="2m1.122896492s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.08685251 +0000 UTC m=+145.655580259" watchObservedRunningTime="2025-09-29 13:46:55.122896492 +0000 UTC m=+145.691624241"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.166101 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.166873 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.666856345 +0000 UTC m=+146.235584094 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.192596 4634 generic.go:334] "Generic (PLEG): container finished" podID="97f23fc8-1289-47c6-8bfc-49c7a338064d" containerID="2e0d01a12cc874346193e0a40c8cc6f80310f998d55bb592d3ec8722a60aa8a2" exitCode=0
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.192685 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" event={"ID":"97f23fc8-1289-47c6-8bfc-49c7a338064d","Type":"ContainerDied","Data":"2e0d01a12cc874346193e0a40c8cc6f80310f998d55bb592d3ec8722a60aa8a2"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.231116 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" event={"ID":"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da","Type":"ContainerStarted","Data":"35ed01940a4f8c9785aa1114b2a1d35a2615b4d58e2f51da30195ddae7f45b90"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.239240 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.249707 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" event={"ID":"1d6051c8-2283-488a-8196-9a331c8ee74c","Type":"ContainerStarted","Data":"111dc85c9892c19aa3e1b5a747db405b9fa94722b1a2b32c16045fe4343cca79"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.249761 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" event={"ID":"1d6051c8-2283-488a-8196-9a331c8ee74c","Type":"ContainerStarted","Data":"f5494d53193293315578da7c443f9c14eb09188e90dbec69f197901cc505d717"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.267810 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.268212 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.768197852 +0000 UTC m=+146.336925591 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.270071 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" event={"ID":"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab","Type":"ContainerStarted","Data":"e379246218e551d9b710330661dda9fd35fb27ef0317d38c4b8e56011a7d347f"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.270145 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" event={"ID":"b09096e0-66ea-4ec7-b5cc-d7bdae86b9ab","Type":"ContainerStarted","Data":"aab1b3a6a8c6647d7cb36cc05686f5cb78ec9ab76778cd7d76fdfc645ebdf220"}
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.284397 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.343045 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-9z4x7" podStartSLOduration=7.343030563 podStartE2EDuration="7.343030563s" podCreationTimestamp="2025-09-29 13:46:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.341958973 +0000 UTC m=+145.910686722" watchObservedRunningTime="2025-09-29 13:46:55.343030563 +0000 UTC m=+145.911758312"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.343468 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c2w5x" podStartSLOduration=122.343463335 podStartE2EDuration="2m2.343463335s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.256201829 +0000 UTC m=+145.824929578" watchObservedRunningTime="2025-09-29 13:46:55.343463335 +0000 UTC m=+145.912191084"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.368405 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.369518 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.869497479 +0000 UTC m=+146.438225238 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.414018 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-x2lkr"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.420336 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 13:46:55 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld
Sep 29 13:46:55 crc kubenswrapper[4634]: [+]process-running ok
Sep 29 13:46:55 crc kubenswrapper[4634]: healthz check failed
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.420393 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.425066 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-zjrr2" podStartSLOduration=7.425055084 podStartE2EDuration="7.425055084s" podCreationTimestamp="2025-09-29 13:46:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.425038623 +0000 UTC m=+145.993766372" watchObservedRunningTime="2025-09-29 13:46:55.425055084 +0000 UTC m=+145.993782833"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.486946 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.487247 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:55.987235553 +0000 UTC m=+146.555963302 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.525585 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-64xkr" podStartSLOduration=122.525571469 podStartE2EDuration="2m2.525571469s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.523340907 +0000 UTC m=+146.092068656" watchObservedRunningTime="2025-09-29 13:46:55.525571469 +0000 UTC m=+146.094299218"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.548215 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" podStartSLOduration=121.548196968 podStartE2EDuration="2m1.548196968s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.54647452 +0000 UTC m=+146.115202269" watchObservedRunningTime="2025-09-29 13:46:55.548196968 +0000 UTC m=+146.116924717"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.588313 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.588684 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.088668383 +0000 UTC m=+146.657396132 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.591219 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" podStartSLOduration=115.591199353 podStartE2EDuration="1m55.591199353s" podCreationTimestamp="2025-09-29 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.589041783 +0000 UTC m=+146.157769532" watchObservedRunningTime="2025-09-29 13:46:55.591199353 +0000 UTC m=+146.159927102"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.633985 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-s52wd" podStartSLOduration=122.633936451 podStartE2EDuration="2m2.633936451s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.631901515 +0000 UTC m=+146.200629264" watchObservedRunningTime="2025-09-29 13:46:55.633936451 +0000 UTC m=+146.202664190"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.689541 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.689812 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.189801485 +0000 UTC m=+146.758529234 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.787390 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-hblk6" podStartSLOduration=121.787357897 podStartE2EDuration="2m1.787357897s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.714532892 +0000 UTC m=+146.283260641" watchObservedRunningTime="2025-09-29 13:46:55.787357897 +0000 UTC m=+146.356085646"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.793543 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.793966 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.293950311 +0000 UTC m=+146.862678060 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.894713 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.895255 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.395244378 +0000 UTC m=+146.963972117 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.895362 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-pccc9" podStartSLOduration=122.89534025 podStartE2EDuration="2m2.89534025s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:55.826937758 +0000 UTC m=+146.395665507" watchObservedRunningTime="2025-09-29 13:46:55.89534025 +0000 UTC m=+146.464067999"
Sep 29 13:46:55 crc kubenswrapper[4634]: I0929 13:46:55.995624 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:55 crc kubenswrapper[4634]: E0929 13:46:55.996029 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.49601483 +0000 UTC m=+147.064742579 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.097170 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.097539 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.597527882 +0000 UTC m=+147.166255631 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.198628 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.198797 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.698768837 +0000 UTC m=+147.267496586 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.199225 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.199503 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.699490297 +0000 UTC m=+147.268218046 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.278040 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" event={"ID":"157285ab-7f36-4042-a4ed-b975c55d7f27","Type":"ContainerStarted","Data":"b9c11ab0c66ca6a57311d6ba74086a881a4f4caa3617de5d3e62386bd4a81407"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.280106 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" event={"ID":"6e8b69ca-c934-4baa-8957-62b6aff5babc","Type":"ContainerStarted","Data":"4c612979d8d645249a8e47a6f07396c257d7fdc0240eda0e88c5bb2ee34356a2"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.281801 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" event={"ID":"aac91c41-1c11-475a-a50e-ee5183bd8219","Type":"ContainerStarted","Data":"48fd1752ff95529816025955855e508bc8e86e151263fd9d41788b3babb49e17"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.283331 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" event={"ID":"f6f19ec5-9530-4336-b19c-fa51a40e7dea","Type":"ContainerStarted","Data":"17ccf2a5b9cedd5172f4c5665426fcd9e928dabba2fb14caed35f1d5e1fabdb7"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.285036 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" event={"ID":"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70","Type":"ContainerStarted","Data":"eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.285769 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.287304 4634 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-9v7l5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body=
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.287342 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.288530 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" event={"ID":"89305210-320b-43d8-97ef-12a7809e9f73","Type":"ContainerStarted","Data":"543e55eefb8821aa51c7d1147bc76cf0635d2c63d957075a900909a1169db68a"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.290176 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-zjrr2" event={"ID":"5806aa9c-9a8b-4e8b-ad14-fb1ae45b0eaf","Type":"ContainerStarted","Data":"d1045e7b2293847849f7e28f3033d33eeed0fe9086f4c576527fef55080a609d"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.291834 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" event={"ID":"97f23fc8-1289-47c6-8bfc-49c7a338064d","Type":"ContainerStarted","Data":"a31a2562d1688aab66e61fac5a1041929ba9c328f0e1b8579e3171c73774e451"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.292783 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" event={"ID":"418efd12-f21a-4866-abb0-84bf3ea7d929","Type":"ContainerStarted","Data":"781d173a6b37149c266968e4309659eb8b9ac52a2b85cc6623d6570972ea05ca"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.294616 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" event={"ID":"e6cb1123-1a52-40e3-9b0c-2ce0eb52cae2","Type":"ContainerStarted","Data":"19ece00c51c3a46213856691e14a1ccd692bb94dd2d210664255c9aae2f7d04c"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.296325 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" event={"ID":"b02e5190-b670-4ec4-824f-a4f18cf79e33","Type":"ContainerStarted","Data":"bd192a405e532ce76ac3d96f976b7684a301d3a3e79899fe6d33a2a266735bc0"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.296362 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" event={"ID":"b02e5190-b670-4ec4-824f-a4f18cf79e33","Type":"ContainerStarted","Data":"1e5889ff02aadeda7b056321d6126a8e1043efe1156c6614d32786f64c57df87"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.298548 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" event={"ID":"4024807b-080e-4f06-a78e-021f46ec69af","Type":"ContainerStarted","Data":"84ea3f89ec0b53f7731438e3479dde2bd2062f8018fcb317426c3384b504c93c"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.298615 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.305478 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.306065 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.80604932 +0000 UTC m=+147.374777069 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.320218 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-9z4x7" event={"ID":"a81c4f43-65d8-4496-ab69-1537e2d01ba1","Type":"ContainerStarted","Data":"dc14b2adec564aaea7e0e108b889ff9924257f5e700a96b6cc284ab52fdf82d6"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.323032 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" event={"ID":"4bf6c344-5613-4019-bc8d-5a15ad459f46","Type":"ContainerStarted","Data":"79f3325876b417d1a9d7f167fdc1952aeb82f438a3353abb5804a9a4b8a30051"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.323061 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" event={"ID":"4bf6c344-5613-4019-bc8d-5a15ad459f46","Type":"ContainerStarted","Data":"df5c2c13b2b8107a39960781afdc0daae2f41186be45d90310a0fb16dfd8861e"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.323462 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.329969 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" event={"ID":"30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3","Type":"ContainerStarted","Data":"8ab1c0658b37fecb01ee74fa1d7a4f3391b68c663f5f68e7ee56cba56297b3d0"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.331013 4634 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-gwwls container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body=
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.331097 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" podUID="30b96b5f-6d0e-424b-9fe8-7c36dc95e8b3" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.333553 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-shmnq" podStartSLOduration=122.333539064 podStartE2EDuration="2m2.333539064s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.333409141 +0000 UTC m=+146.902136890" watchObservedRunningTime="2025-09-29 13:46:56.333539064 +0000 UTC m=+146.902266813"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.336021 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc" event={"ID":"5653fd8a-02c3-4169-8197-d9b2b5cd9086","Type":"ContainerStarted","Data":"1a6879df0fcfecc9b237111cd1114eb574ea08c0de63232da3fc3e3435ad2be2"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.336058 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc" event={"ID":"5653fd8a-02c3-4169-8197-d9b2b5cd9086","Type":"ContainerStarted","Data":"a9b91ab7bbc04dc38a51d72a687633ee006b3d40e47d05b08356d38d25240d8b"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.342400 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" event={"ID":"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be","Type":"ContainerStarted","Data":"febefc685de1467014cc5aa29d1000bab8f4494946e39284bf307e4c313f2a60"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.342441 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" event={"ID":"f1ca81ca-ca2a-4f83-9a40-ef8efdc1c7be","Type":"ContainerStarted","Data":"f8d949a4a4c44f18daa473e30c6fc5ca8175415c06a4ef467c44717c36b68988"}
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.352056 4634 patch_prober.go:28] interesting pod/downloads-7954f5f757-kcrwt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body=
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.352112 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kcrwt" podUID="cfca5304-d886-40b0-93ea-cb412ba053f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.372345 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zvdk"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.398479 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-m4gqz" podStartSLOduration=122.398463089 podStartE2EDuration="2m2.398463089s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.397498093 +0000 UTC m=+146.966225852" watchObservedRunningTime="2025-09-29 13:46:56.398463089 +0000 UTC m=+146.967190838"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.408072 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.422185 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:56.922169949 +0000 UTC m=+147.490897698 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.435503 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 13:46:56 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld
Sep 29 13:46:56 crc kubenswrapper[4634]: [+]process-running ok
Sep 29 13:46:56 crc kubenswrapper[4634]: healthz check failed
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.435555 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.437008 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" podStartSLOduration=122.436997511 podStartE2EDuration="2m2.436997511s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.425699447 +0000 UTC m=+146.994427196" watchObservedRunningTime="2025-09-29 13:46:56.436997511 +0000 UTC m=+147.005725260"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.486983 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" podStartSLOduration=122.48696611 podStartE2EDuration="2m2.48696611s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.486463047 +0000 UTC m=+147.055190796" watchObservedRunningTime="2025-09-29 13:46:56.48696611 +0000 UTC m=+147.055693859"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.510266 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.510454 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.010429562 +0000 UTC m=+147.579157311 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.510589 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.510887 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.010876776 +0000 UTC m=+147.579604525 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.599039 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-42nsw" podStartSLOduration=123.599011216 podStartE2EDuration="2m3.599011216s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.538478602 +0000 UTC m=+147.107206351" watchObservedRunningTime="2025-09-29 13:46:56.599011216 +0000 UTC m=+147.167738965"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.611968 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.612386 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.112371038 +0000 UTC m=+147.681098777 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.706065 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" podStartSLOduration=122.706048612 podStartE2EDuration="2m2.706048612s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.704159189 +0000 UTC m=+147.272886938" watchObservedRunningTime="2025-09-29 13:46:56.706048612 +0000 UTC m=+147.274776361"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.706342 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8sstx" podStartSLOduration=123.70633761 podStartE2EDuration="2m3.70633761s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.605812075 +0000 UTC m=+147.174539824" watchObservedRunningTime="2025-09-29 13:46:56.70633761 +0000 UTC m=+147.275065349"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.713759 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.714064 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.214052295 +0000 UTC m=+147.782780044 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.769578 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-27cm7" podStartSLOduration=123.769558878 podStartE2EDuration="2m3.769558878s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.766065191 +0000 UTC m=+147.334792940" watchObservedRunningTime="2025-09-29 13:46:56.769558878 +0000 UTC m=+147.338286627"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.814594 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.814968 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.31494198 +0000 UTC m=+147.883669729 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.871533 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" podStartSLOduration=123.871514773 podStartE2EDuration="2m3.871514773s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.861510085 +0000 UTC m=+147.430237834" watchObservedRunningTime="2025-09-29 13:46:56.871514773 +0000 UTC m=+147.440242522"
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.915833 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:56 crc kubenswrapper[4634]: E0929 13:46:56.916194 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.416178335 +0000 UTC m=+147.984906074 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:56 crc kubenswrapper[4634]: I0929 13:46:56.926467 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-nbc4c" podStartSLOduration=123.926450231 podStartE2EDuration="2m3.926450231s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.926093731 +0000 UTC m=+147.494821480" watchObservedRunningTime="2025-09-29 13:46:56.926450231 +0000 UTC m=+147.495177980"
Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.000270 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-mwwmx" podStartSLOduration=123.000249822 podStartE2EDuration="2m3.000249822s" podCreationTimestamp="2025-09-29 13:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:56.999263175 +0000 UTC m=+147.567990924" watchObservedRunningTime="2025-09-29 13:46:57.000249822 +0000 UTC m=+147.568977571"
Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.017203 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.017388 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.517361698 +0000 UTC m=+148.086089447 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.017596 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.017902 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.517894483 +0000 UTC m=+148.086622232 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.118769 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.118963 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.618923493 +0000 UTC m=+148.187651242 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.119068 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5"
Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.119431 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.619423266 +0000 UTC m=+148.188151005 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.169666 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-2z2nc" podStartSLOduration=124.169647933 podStartE2EDuration="2m4.169647933s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:57.133141688 +0000 UTC m=+147.701869437" watchObservedRunningTime="2025-09-29 13:46:57.169647933 +0000 UTC m=+147.738375682"
Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.225486 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.225624 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.725598159 +0000 UTC m=+148.294325908 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.225797 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.226044 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.72603141 +0000 UTC m=+148.294759159 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.327237 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.327560 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.827544983 +0000 UTC m=+148.396272732 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.346620 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7hz92" event={"ID":"8269ba4f-674f-4886-aff2-1474500e1c38","Type":"ContainerStarted","Data":"b617dbba399512d679779e353d6b6698d23243951e81549f4def001780f4ec4e"} Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.348221 4634 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-bhfql container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.348293 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" podUID="f8c7cf50-5af5-488b-a97b-c3452513d570" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.348661 4634 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-vvk4f container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.27:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.348727 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" podUID="172d0968-1bd7-48d4-9bcd-62590bead86c" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.27:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.349140 4634 patch_prober.go:28] interesting pod/console-operator-58897d9998-64xkr container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/readyz\": context deadline exceeded" start-of-body= Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.349201 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" event={"ID":"97f23fc8-1289-47c6-8bfc-49c7a338064d","Type":"ContainerStarted","Data":"cabf4bdad16684c8183e6148f5d8a49dcd782c0ec9b5773a8c6a6f10362eaec5"} Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.349208 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-64xkr" podUID="d19555df-603d-4db0-9ea2-9a473a55380d" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/readyz\": context deadline exceeded" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.350301 4634 
patch_prober.go:28] interesting pod/marketplace-operator-79b997595-9v7l5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.350331 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.399566 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-gwwls" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.417949 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:46:57 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:46:57 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:46:57 crc kubenswrapper[4634]: healthz check failed Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.418022 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.431441 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.432016 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:57.932000507 +0000 UTC m=+148.500728256 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.491255 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gpgzb" podStartSLOduration=124.491238174 podStartE2EDuration="2m4.491238174s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:57.225059783 +0000 UTC m=+147.793787522" watchObservedRunningTime="2025-09-29 13:46:57.491238174 +0000 UTC m=+148.059965913" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.491946 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" podStartSLOduration=124.491940564 podStartE2EDuration="2m4.491940564s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:46:57.491483892 +0000 UTC m=+148.060211641" watchObservedRunningTime="2025-09-29 13:46:57.491940564 +0000 UTC m=+148.060668313" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.533339 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.533496 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.033469199 +0000 UTC m=+148.602196948 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.533544 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.533813 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.033802138 +0000 UTC m=+148.602529887 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.635548 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.635747 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.135712711 +0000 UTC m=+148.704440460 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.635961 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.636289 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.136275457 +0000 UTC m=+148.705003196 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.737297 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.737482 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.23745522 +0000 UTC m=+148.806182969 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.737803 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.738129 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.238116259 +0000 UTC m=+148.806844008 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.738465 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qnr2w"] Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.739356 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: W0929 13:46:57.749197 4634 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.749237 4634 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.791532 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnr2w"] Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.839856 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.840035 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.340007352 +0000 UTC m=+148.908735101 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.840220 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-catalog-content\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.840273 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.840354 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t88d9\" (UniqueName: \"kubernetes.io/projected/e93988bf-f719-4ad5-a46c-73e3877b3e6b-kube-api-access-t88d9\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.840391 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-utilities\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.840650 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.340643419 +0000 UTC m=+148.909371168 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.915262 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q4lqj"] Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.916238 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.918270 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.941143 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.941329 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-catalog-content\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.941414 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t88d9\" (UniqueName: \"kubernetes.io/projected/e93988bf-f719-4ad5-a46c-73e3877b3e6b-kube-api-access-t88d9\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.941442 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-utilities\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.941485 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.941512 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.941530 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:57 crc kubenswrapper[4634]: E0929 13:46:57.943768 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-09-29 13:46:58.443751177 +0000 UTC m=+149.012478926 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.944156 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-catalog-content\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.944894 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-utilities\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.949216 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.950269 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.952228 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q4lqj"] Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.967773 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:57 crc kubenswrapper[4634]: I0929 13:46:57.992911 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t88d9\" (UniqueName: \"kubernetes.io/projected/e93988bf-f719-4ad5-a46c-73e3877b3e6b-kube-api-access-t88d9\") pod \"certified-operators-qnr2w\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.044406 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-utilities\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") 
" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.044710 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-catalog-content\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.044742 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62w8c\" (UniqueName: \"kubernetes.io/projected/27ebe57d-6e97-49c8-8106-f4e6983b3e30-kube-api-access-62w8c\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.044772 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.044800 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.045463 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.545439184 +0000 UTC m=+149.114166933 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.057762 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.104246 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gpvqs"] Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.105107 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.126978 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.138139 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.142703 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.148730 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.148951 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.648933921 +0000 UTC m=+149.217661670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.149072 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.149160 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-utilities\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.149188 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-catalog-content\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.149217 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62w8c\" (UniqueName: \"kubernetes.io/projected/27ebe57d-6e97-49c8-8106-f4e6983b3e30-kube-api-access-62w8c\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.149699 4634 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.649690913 +0000 UTC m=+149.218418662 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.149970 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-utilities\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.150170 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-catalog-content\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.154393 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.172587 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gpvqs"] Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.210892 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62w8c\" (UniqueName: \"kubernetes.io/projected/27ebe57d-6e97-49c8-8106-f4e6983b3e30-kube-api-access-62w8c\") pod \"community-operators-q4lqj\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.234401 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.250658 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.250827 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-utilities\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.250866 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-catalog-content\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.250889 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr9ff\" (UniqueName: \"kubernetes.io/projected/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-kube-api-access-wr9ff\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.250994 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.750979759 +0000 UTC m=+149.319707508 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.332855 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lx58l"] Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.333776 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.349471 4634 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-bhfql container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.349518 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" podUID="f8c7cf50-5af5-488b-a97b-c3452513d570" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.353981 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-utilities\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.354025 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.354041 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-catalog-content\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.354062 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr9ff\" (UniqueName: \"kubernetes.io/projected/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-kube-api-access-wr9ff\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.354409 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.854397625 +0000 UTC m=+149.423125374 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.354782 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-utilities\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.354768 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-catalog-content\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.378834 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7hz92" event={"ID":"8269ba4f-674f-4886-aff2-1474500e1c38","Type":"ContainerStarted","Data":"b936e475c9acb39bc9352a40d01aed075cd9dadbc11b1213569e4422ca6d7991"} Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.380234 4634 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-9v7l5 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.380283 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.425779 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lx58l"] Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.442337 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:46:58 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:46:58 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:46:58 crc kubenswrapper[4634]: healthz check failed Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.442396 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.453352 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr9ff\" (UniqueName: 
\"kubernetes.io/projected/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-kube-api-access-wr9ff\") pod \"certified-operators-gpvqs\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.455013 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.455261 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-catalog-content\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.455514 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-utilities\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.455535 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffj5t\" (UniqueName: \"kubernetes.io/projected/df83a2e1-7545-44da-91a3-72925fe5fd1b-kube-api-access-ffj5t\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.456206 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:58.956192085 +0000 UTC m=+149.524919834 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.558715 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-utilities\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.558749 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffj5t\" (UniqueName: \"kubernetes.io/projected/df83a2e1-7545-44da-91a3-72925fe5fd1b-kube-api-access-ffj5t\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.558794 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-catalog-content\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.558820 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.559070 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.059060276 +0000 UTC m=+149.627788025 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.559803 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-utilities\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.560007 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-catalog-content\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.604737 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffj5t\" (UniqueName: \"kubernetes.io/projected/df83a2e1-7545-44da-91a3-72925fe5fd1b-kube-api-access-ffj5t\") pod \"community-operators-lx58l\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.659519 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.662388 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.662521 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.162503362 +0000 UTC m=+149.731231111 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.662607 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.662897 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.162889563 +0000 UTC m=+149.731617312 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.711424 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.716657 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.724983 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.766180 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.766338 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.266317879 +0000 UTC m=+149.835045628 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.766512 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.766838 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.266828503 +0000 UTC m=+149.835556252 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.868262 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.868611 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.368596152 +0000 UTC m=+149.937323901 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:58 crc kubenswrapper[4634]: I0929 13:46:58.969628 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:58 crc kubenswrapper[4634]: E0929 13:46:58.970169 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.470156845 +0000 UTC m=+150.038884594 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.073002 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.073330 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.573315574 +0000 UTC m=+150.142043323 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.174037 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.174312 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.674299402 +0000 UTC m=+150.243027151 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.275636 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.275785 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.775767983 +0000 UTC m=+150.344495742 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.276133 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.278304 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.778290833 +0000 UTC m=+150.347018582 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.377533 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.378157 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.87814071 +0000 UTC m=+150.446868459 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.398374 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7hz92" event={"ID":"8269ba4f-674f-4886-aff2-1474500e1c38","Type":"ContainerStarted","Data":"88dbca7fa360872453a880070105c013c1ddd03006425ba639badf26cb9f5fcf"} Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.424322 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e786970baea87b13521669268f99652c16bfef3b6ff77e538825ed9b15411456"} Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.431285 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:46:59 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:46:59 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:46:59 crc kubenswrapper[4634]: healthz check failed Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.431332 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.478988 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.479333 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:46:59.979320733 +0000 UTC m=+150.548048482 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.579669 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.579944 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.079928141 +0000 UTC m=+150.648655890 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.681463 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.682017 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.182003529 +0000 UTC m=+150.750731278 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.786733 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.787133 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.287114422 +0000 UTC m=+150.855842171 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.887892 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.888265 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.388254134 +0000 UTC m=+150.956981873 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.929024 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7dc9w"] Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.938223 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.974784 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.976480 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dc9w"] Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.994305 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.994405 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.494379685 +0000 UTC m=+151.063107434 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:46:59 crc kubenswrapper[4634]: I0929 13:46:59.994932 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:46:59 crc kubenswrapper[4634]: E0929 13:46:59.995457 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.495435134 +0000 UTC m=+151.064162973 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.097691 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q4lqj"] Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.098132 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.098270 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpfs9\" (UniqueName: \"kubernetes.io/projected/422b00d2-0df9-4778-8040-d2b175d8e67e-kube-api-access-gpfs9\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.098325 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-utilities\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.098362 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-catalog-content\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.098449 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.598435698 +0000 UTC m=+151.167163447 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.199897 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.199939 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpfs9\" (UniqueName: \"kubernetes.io/projected/422b00d2-0df9-4778-8040-d2b175d8e67e-kube-api-access-gpfs9\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.199993 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-utilities\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.200019 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-catalog-content\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.200316 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.7002974 +0000 UTC m=+151.269025149 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.200445 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-catalog-content\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.200732 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-utilities\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.290000 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpfs9\" (UniqueName: \"kubernetes.io/projected/422b00d2-0df9-4778-8040-d2b175d8e67e-kube-api-access-gpfs9\") pod \"redhat-marketplace-7dc9w\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.292152 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gpvqs"] Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.300794 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.301643 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.801595947 +0000 UTC m=+151.370323696 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.308429 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:00 crc kubenswrapper[4634]: W0929 13:47:00.331055 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc80b96a_5e2d_4194_8d8c_fb0bf22a58c8.slice/crio-c28494fbbf49f0ab48ec4b9828a2fe59d3c0f2125916466565693961bad4c869 WatchSource:0}: Error finding container c28494fbbf49f0ab48ec4b9828a2fe59d3c0f2125916466565693961bad4c869: Status 404 returned error can't find the container with id c28494fbbf49f0ab48ec4b9828a2fe59d3c0f2125916466565693961bad4c869 Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.378155 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5892t"] Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.379383 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.393368 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5892t"] Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.402023 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.402321 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:00.902309428 +0000 UTC m=+151.471037177 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.421214 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:00 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:00 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:00 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.421254 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.460347 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4lqj" event={"ID":"27ebe57d-6e97-49c8-8106-f4e6983b3e30","Type":"ContainerStarted","Data":"28b5e9ff6ea6ee6497efcea7261ebc738f6128ec2b72b3abf4c6e58e3c34ebd8"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.477597 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d9976063c4b4b37c7eb62e528bf23f31e21cc96b6e7772572f266c020afdcc2f"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.477639 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e680babf8506435112da3e627112a4aa6aa20f648c6ec806b5f0a8ff0b314505"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.478180 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.479548 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gpvqs" event={"ID":"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8","Type":"ContainerStarted","Data":"c28494fbbf49f0ab48ec4b9828a2fe59d3c0f2125916466565693961bad4c869"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.481050 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-7hz92" event={"ID":"8269ba4f-674f-4886-aff2-1474500e1c38","Type":"ContainerStarted","Data":"444185aa730c763089d15812e62e417a2ec1d3338739ccbb3ab79356703e42a1"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.482362 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ab2a953fd06b016ecc10be8a88456cbde43b8dc8dbd004a066a183531838ac57"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.482386 4634 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1478614c8b7aa46704969c520d5ec1c99da62b09a143715d06497bed809137c0"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.483414 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e718283b90f3d5294d871d5bd19cb496fcdabfd8bc7336c9f3a1e96924dd943d"} Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.504432 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.504742 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr9bx\" (UniqueName: \"kubernetes.io/projected/48259401-fe1f-4b48-9574-af1c32bccce5-kube-api-access-gr9bx\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.504769 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-utilities\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.504808 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-catalog-content\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.504914 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.004883229 +0000 UTC m=+151.573610988 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.517288 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnr2w"] Sep 29 13:47:00 crc kubenswrapper[4634]: W0929 13:47:00.574841 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode93988bf_f719_4ad5_a46c_73e3877b3e6b.slice/crio-ac94e66c23bdf42f7d98b66e279c66ca58ef3c1c2e496a920267fff53178b98e WatchSource:0}: Error finding container ac94e66c23bdf42f7d98b66e279c66ca58ef3c1c2e496a920267fff53178b98e: Status 404 returned error can't find the container with id ac94e66c23bdf42f7d98b66e279c66ca58ef3c1c2e496a920267fff53178b98e Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.596666 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lx58l"] Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.608455 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.608532 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr9bx\" (UniqueName: \"kubernetes.io/projected/48259401-fe1f-4b48-9574-af1c32bccce5-kube-api-access-gr9bx\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.608597 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-utilities\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.608689 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-catalog-content\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.608891 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.10887391 +0000 UTC m=+151.677601659 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.610293 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-utilities\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.610689 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-catalog-content\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.652230 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr9bx\" (UniqueName: \"kubernetes.io/projected/48259401-fe1f-4b48-9574-af1c32bccce5-kube-api-access-gr9bx\") pod \"redhat-marketplace-5892t\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.709632 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.709903 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.209887089 +0000 UTC m=+151.778614838 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.709950 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.710443 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.210425645 +0000 UTC m=+151.779153394 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.764358 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.812472 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.813196 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.313180971 +0000 UTC m=+151.881908710 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.913944 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.914275 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.414263742 +0000 UTC m=+151.982991481 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.954953 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-7hz92" podStartSLOduration=12.954934393 podStartE2EDuration="12.954934393s" podCreationTimestamp="2025-09-29 13:46:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:47:00.752213167 +0000 UTC m=+151.320940916" watchObservedRunningTime="2025-09-29 13:47:00.954934393 +0000 UTC m=+151.523662142" Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.956318 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-44875"] Sep 29 13:47:00 crc kubenswrapper[4634]: I0929 13:47:00.957296 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:00 crc kubenswrapper[4634]: W0929 13:47:00.963161 4634 reflector.go:561] object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh": failed to list *v1.Secret: secrets "redhat-operators-dockercfg-ct8rh" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Sep 29 13:47:00 crc kubenswrapper[4634]: E0929 13:47:00.963204 4634 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"redhat-operators-dockercfg-ct8rh\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"redhat-operators-dockercfg-ct8rh\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.016197 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:01 crc kubenswrapper[4634]: E0929 13:47:01.016542 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.516526096 +0000 UTC m=+152.085253835 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.026352 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.026516 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.036286 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.058642 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-44875"] Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.071136 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.071173 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.086289 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console-operator/console-operator-58897d9998-64xkr" Sep 29 13:47:01 crc kubenswrapper[4634]: E0929 13:47:01.093917 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc80b96a_5e2d_4194_8d8c_fb0bf22a58c8.slice/crio-conmon-d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf83a2e1_7545_44da_91a3_72925fe5fd1b.slice/crio-52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf83a2e1_7545_44da_91a3_72925fe5fd1b.slice/crio-conmon-52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode93988bf_f719_4ad5_a46c_73e3877b3e6b.slice/crio-conmon-e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745.scope\": RecentStats: unable to find data in memory cache]" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.128933 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-utilities\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.129010 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.129036 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kpct\" (UniqueName: \"kubernetes.io/projected/54680624-14fd-47bf-829b-1f0342ed7db1-kube-api-access-5kpct\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.129061 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-catalog-content\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: E0929 13:47:01.129851 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.629839576 +0000 UTC m=+152.198567325 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.150122 4634 patch_prober.go:28] interesting pod/downloads-7954f5f757-kcrwt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.150167 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kcrwt" podUID="cfca5304-d886-40b0-93ea-cb412ba053f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.150458 4634 patch_prober.go:28] interesting pod/downloads-7954f5f757-kcrwt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.150477 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-kcrwt" podUID="cfca5304-d886-40b0-93ea-cb412ba053f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.230741 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:01 crc kubenswrapper[4634]: E0929 13:47:01.231070 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.73104593 +0000 UTC m=+152.299773679 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.231209 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.231234 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kpct\" (UniqueName: \"kubernetes.io/projected/54680624-14fd-47bf-829b-1f0342ed7db1-kube-api-access-5kpct\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.231256 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-catalog-content\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.231393 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-utilities\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: E0929 13:47:01.232953 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 13:47:01.732936003 +0000 UTC m=+152.301663752 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xwzp5" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.233494 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-utilities\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.233588 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-catalog-content\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.246299 4634 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.263240 4634 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T13:47:01.246325035Z","Handler":null,"Name":""} Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.265924 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kpct\" (UniqueName: \"kubernetes.io/projected/54680624-14fd-47bf-829b-1f0342ed7db1-kube-api-access-5kpct\") pod \"redhat-operators-44875\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.279472 4634 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.279515 4634 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.302534 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6nrlj"] Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.303443 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.324375 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6nrlj"] Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.324642 4634 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-md5b8 container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.324727 4634 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-md5b8 container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.324782 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" podUID="4024807b-080e-4f06-a78e-021f46ec69af" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.324688 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" podUID="4024807b-080e-4f06-a78e-021f46ec69af" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.333599 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.348071 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.420144 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.420762 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.421168 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.421845 4634 patch_prober.go:28] interesting pod/console-f9d7485db-5nvq7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.421882 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-5nvq7" podUID="95f9d479-e9b9-4086-8792-83625bfaff6e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.434615 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvthq\" (UniqueName: \"kubernetes.io/projected/bbd41564-d1da-427c-8bd6-6a2faea397cc-kube-api-access-fvthq\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.434666 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.434696 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-catalog-content\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.434744 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-utilities\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.441577 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:01 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:01 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:01 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.441623 4634 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.478523 4634 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.478561 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.498110 4634 generic.go:334] "Generic (PLEG): container finished" podID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerID="e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745" exitCode=0 Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.498690 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnr2w" event={"ID":"e93988bf-f719-4ad5-a46c-73e3877b3e6b","Type":"ContainerDied","Data":"e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745"} Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.498716 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnr2w" event={"ID":"e93988bf-f719-4ad5-a46c-73e3877b3e6b","Type":"ContainerStarted","Data":"ac94e66c23bdf42f7d98b66e279c66ca58ef3c1c2e496a920267fff53178b98e"} Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.503541 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.510318 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.520605 4634 generic.go:334] "Generic (PLEG): container finished" podID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerID="f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d" exitCode=0 Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.520682 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4lqj" event={"ID":"27ebe57d-6e97-49c8-8106-f4e6983b3e30","Type":"ContainerDied","Data":"f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d"} Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.538001 4634 generic.go:334] "Generic (PLEG): container finished" podID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerID="52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019" exitCode=0 Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.538122 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx58l" event={"ID":"df83a2e1-7545-44da-91a3-72925fe5fd1b","Type":"ContainerDied","Data":"52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019"} Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.538147 4634 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx58l" event={"ID":"df83a2e1-7545-44da-91a3-72925fe5fd1b","Type":"ContainerStarted","Data":"dd0eab8e5e3a49ee6f6851ad035f8c3b6cb7c4d4095b8e7ebe9b91b904b8760d"} Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.538949 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-catalog-content\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.539028 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-utilities\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.539202 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvthq\" (UniqueName: \"kubernetes.io/projected/bbd41564-d1da-427c-8bd6-6a2faea397cc-kube-api-access-fvthq\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.539698 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-utilities\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.541463 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-catalog-content\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.552472 4634 generic.go:334] "Generic (PLEG): container finished" podID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerID="d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b" exitCode=0 Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.571749 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gpvqs" event={"ID":"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8","Type":"ContainerDied","Data":"d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b"} Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.583824 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bhfql" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.605269 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-vbq68" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.641006 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5892t"] Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.647840 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvthq\" (UniqueName: 
\"kubernetes.io/projected/bbd41564-d1da-427c-8bd6-6a2faea397cc-kube-api-access-fvthq\") pod \"redhat-operators-6nrlj\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.772397 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dc9w"] Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.829147 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xwzp5\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.911141 4634 patch_prober.go:28] interesting pod/apiserver-76f77b778f-rgfdh container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]log ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]etcd ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/start-apiserver-admission-initializer ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/generic-apiserver-start-informers ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/max-in-flight-filter ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/storage-object-count-tracker-hook ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/image.openshift.io-apiserver-caches ok Sep 29 13:47:01 crc kubenswrapper[4634]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Sep 29 13:47:01 crc kubenswrapper[4634]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/project.openshift.io-projectcache ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/openshift.io-startinformers ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/openshift.io-restmapperupdater ok Sep 29 13:47:01 crc kubenswrapper[4634]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Sep 29 13:47:01 crc kubenswrapper[4634]: livez check failed Sep 29 13:47:01 crc kubenswrapper[4634]: I0929 13:47:01.911411 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" podUID="97f23fc8-1289-47c6-8bfc-49c7a338064d" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.000382 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.117708 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.208296 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.212774 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.218532 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.400468 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwzp5"] Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.419245 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:02 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:02 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:02 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.419329 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.561003 4634 generic.go:334] "Generic (PLEG): container finished" podID="1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" containerID="35ed01940a4f8c9785aa1114b2a1d35a2615b4d58e2f51da30195ddae7f45b90" exitCode=0 Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.561056 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" event={"ID":"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da","Type":"ContainerDied","Data":"35ed01940a4f8c9785aa1114b2a1d35a2615b4d58e2f51da30195ddae7f45b90"} Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.568895 4634 generic.go:334] "Generic (PLEG): container finished" podID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerID="882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e" exitCode=0 Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.568953 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dc9w" event={"ID":"422b00d2-0df9-4778-8040-d2b175d8e67e","Type":"ContainerDied","Data":"882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e"} Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.568978 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dc9w" event={"ID":"422b00d2-0df9-4778-8040-d2b175d8e67e","Type":"ContainerStarted","Data":"5b32827bf9582e31f660f79870ee15d12cab4f15af6820d27edcac1000f9c90f"} Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.599553 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" event={"ID":"a4bc890e-57a2-4633-88bf-cb66c90293e8","Type":"ContainerStarted","Data":"4183ec6db0b153f2ace68500beba4226bff72cc29dd4ac892c5f07c981682cf8"} Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.599616 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.623205 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.623826 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.637486 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.637756 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.637778 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5892t" event={"ID":"48259401-fe1f-4b48-9574-af1c32bccce5","Type":"ContainerDied","Data":"907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2"} Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.637747 4634 generic.go:334] "Generic (PLEG): container finished" podID="48259401-fe1f-4b48-9574-af1c32bccce5" containerID="907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2" exitCode=0 Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.638459 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5892t" event={"ID":"48259401-fe1f-4b48-9574-af1c32bccce5","Type":"ContainerStarted","Data":"e9b138d8bf22f4fedf480cc01b18b49aadcd86ebc07228bedd5e3afbeec42ab1"} Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.644450 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.669998 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6nrlj"] Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.672548 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" podStartSLOduration=129.672531121 podStartE2EDuration="2m9.672531121s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:47:02.666198296 +0000 UTC m=+153.234926045" watchObservedRunningTime="2025-09-29 13:47:02.672531121 +0000 UTC m=+153.241258870" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.724491 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-44875"] Sep 29 13:47:02 crc kubenswrapper[4634]: W0929 13:47:02.752610 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54680624_14fd_47bf_829b_1f0342ed7db1.slice/crio-d5fe52fd4f98b61f7cff4c44514ed9139bbca8d35ffd68b6eef7d12d3d02916b WatchSource:0}: Error finding container 
d5fe52fd4f98b61f7cff4c44514ed9139bbca8d35ffd68b6eef7d12d3d02916b: Status 404 returned error can't find the container with id d5fe52fd4f98b61f7cff4c44514ed9139bbca8d35ffd68b6eef7d12d3d02916b Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.774424 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.775600 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.878861 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.878989 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.879481 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.914257 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:02 crc kubenswrapper[4634]: I0929 13:47:02.964573 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.281159 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-zjrr2" Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.322377 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-md5b8" Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.416291 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:03 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:03 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:03 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.416368 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.579124 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.675521 4634 generic.go:334] "Generic (PLEG): container finished" podID="54680624-14fd-47bf-829b-1f0342ed7db1" containerID="970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1" exitCode=0 Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.675753 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44875" event={"ID":"54680624-14fd-47bf-829b-1f0342ed7db1","Type":"ContainerDied","Data":"970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1"} Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.676303 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44875" event={"ID":"54680624-14fd-47bf-829b-1f0342ed7db1","Type":"ContainerStarted","Data":"d5fe52fd4f98b61f7cff4c44514ed9139bbca8d35ffd68b6eef7d12d3d02916b"} Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.709825 4634 generic.go:334] "Generic (PLEG): container finished" podID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerID="aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc" exitCode=0 Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.710006 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nrlj" event={"ID":"bbd41564-d1da-427c-8bd6-6a2faea397cc","Type":"ContainerDied","Data":"aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc"} Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.710053 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nrlj" event={"ID":"bbd41564-d1da-427c-8bd6-6a2faea397cc","Type":"ContainerStarted","Data":"a3802097b52f4966f88650557b9ab00679502908120f4d9d6c76ef4ff5ec4234"} Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.725505 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" 
event={"ID":"a4bc890e-57a2-4633-88bf-cb66c90293e8","Type":"ContainerStarted","Data":"07211b46ca588d7805b8f3914ab9c79152702771e4a3ab52fffd492ec7209f8a"} Sep 29 13:47:03 crc kubenswrapper[4634]: I0929 13:47:03.741133 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19da7e0e-5b6f-4b46-8998-a0f78da0289f","Type":"ContainerStarted","Data":"798cd64c65a0503aa96a6818631547e27b2b6c82c1168675794a8eb8673823e6"} Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.125579 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.205644 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx2fg\" (UniqueName: \"kubernetes.io/projected/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-kube-api-access-gx2fg\") pod \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.205680 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-config-volume\") pod \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.205708 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-secret-volume\") pod \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\" (UID: \"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da\") " Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.206590 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-config-volume" (OuterVolumeSpecName: "config-volume") pod "1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" (UID: "1a7bd45c-8ec4-4af0-b4cb-812ab9b173da"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.206737 4634 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.212014 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-kube-api-access-gx2fg" (OuterVolumeSpecName: "kube-api-access-gx2fg") pod "1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" (UID: "1a7bd45c-8ec4-4af0-b4cb-812ab9b173da"). InnerVolumeSpecName "kube-api-access-gx2fg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.212041 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" (UID: "1a7bd45c-8ec4-4af0-b4cb-812ab9b173da"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.308206 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx2fg\" (UniqueName: \"kubernetes.io/projected/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-kube-api-access-gx2fg\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.308238 4634 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.415381 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:04 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:04 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:04 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.415781 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.750301 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19da7e0e-5b6f-4b46-8998-a0f78da0289f","Type":"ContainerStarted","Data":"f9d4ac0d981911ac8080ad84ee6c7e508de4a2735f17a7fca5a2bb1e96840604"} Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.752927 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" event={"ID":"1a7bd45c-8ec4-4af0-b4cb-812ab9b173da","Type":"ContainerDied","Data":"27533fedb174b63fb84357702cafd910b6fa99ce4e1c500ce76d066921def8c6"} Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.752969 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27533fedb174b63fb84357702cafd910b6fa99ce4e1c500ce76d066921def8c6" Sep 29 13:47:04 crc kubenswrapper[4634]: I0929 13:47:04.752981 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd" Sep 29 13:47:05 crc kubenswrapper[4634]: I0929 13:47:05.415077 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:05 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:05 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:05 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:05 crc kubenswrapper[4634]: I0929 13:47:05.415209 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:05 crc kubenswrapper[4634]: I0929 13:47:05.767944 4634 generic.go:334] "Generic (PLEG): container finished" podID="19da7e0e-5b6f-4b46-8998-a0f78da0289f" containerID="f9d4ac0d981911ac8080ad84ee6c7e508de4a2735f17a7fca5a2bb1e96840604" exitCode=0 Sep 29 13:47:05 crc kubenswrapper[4634]: I0929 13:47:05.768482 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19da7e0e-5b6f-4b46-8998-a0f78da0289f","Type":"ContainerDied","Data":"f9d4ac0d981911ac8080ad84ee6c7e508de4a2735f17a7fca5a2bb1e96840604"} Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.076541 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.082276 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-rgfdh" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.353552 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 13:47:06 crc kubenswrapper[4634]: E0929 13:47:06.353777 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" containerName="collect-profiles" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.353789 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" containerName="collect-profiles" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.353912 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" containerName="collect-profiles" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.354249 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.358821 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.358991 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.362273 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.418042 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:06 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:06 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:06 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.418160 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.437817 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a99d559-349e-4679-8139-0ed7a9617af7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.437928 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a99d559-349e-4679-8139-0ed7a9617af7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.539270 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a99d559-349e-4679-8139-0ed7a9617af7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.539352 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a99d559-349e-4679-8139-0ed7a9617af7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.539937 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a99d559-349e-4679-8139-0ed7a9617af7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.567906 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a99d559-349e-4679-8139-0ed7a9617af7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:06 crc kubenswrapper[4634]: I0929 13:47:06.680064 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.160576 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.171727 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.254255 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kubelet-dir\") pod \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.254605 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kube-api-access\") pod \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\" (UID: \"19da7e0e-5b6f-4b46-8998-a0f78da0289f\") " Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.254841 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "19da7e0e-5b6f-4b46-8998-a0f78da0289f" (UID: "19da7e0e-5b6f-4b46-8998-a0f78da0289f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.255268 4634 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.267314 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "19da7e0e-5b6f-4b46-8998-a0f78da0289f" (UID: "19da7e0e-5b6f-4b46-8998-a0f78da0289f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.357008 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/19da7e0e-5b6f-4b46-8998-a0f78da0289f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.419786 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:07 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:07 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:07 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.419845 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.830869 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a99d559-349e-4679-8139-0ed7a9617af7","Type":"ContainerStarted","Data":"31982298718cc1b239eb69b2d6dc7c4a3ab786d1607be1ae600e75220b5e6704"} Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.858599 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"19da7e0e-5b6f-4b46-8998-a0f78da0289f","Type":"ContainerDied","Data":"798cd64c65a0503aa96a6818631547e27b2b6c82c1168675794a8eb8673823e6"} Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.858642 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="798cd64c65a0503aa96a6818631547e27b2b6c82c1168675794a8eb8673823e6" Sep 29 13:47:07 crc kubenswrapper[4634]: I0929 13:47:07.858744 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 13:47:08 crc kubenswrapper[4634]: I0929 13:47:08.416415 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:08 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:08 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:08 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:08 crc kubenswrapper[4634]: I0929 13:47:08.416678 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:08 crc kubenswrapper[4634]: I0929 13:47:08.871004 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a99d559-349e-4679-8139-0ed7a9617af7","Type":"ContainerStarted","Data":"d59fd481f0985d43717083f312ec82194b310d7e7d262647543f603d3827f3c7"} Sep 29 13:47:09 crc kubenswrapper[4634]: I0929 13:47:09.416219 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:09 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:09 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:09 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:09 crc kubenswrapper[4634]: I0929 13:47:09.416287 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:09 crc kubenswrapper[4634]: I0929 13:47:09.882828 4634 generic.go:334] "Generic (PLEG): container finished" podID="2a99d559-349e-4679-8139-0ed7a9617af7" containerID="d59fd481f0985d43717083f312ec82194b310d7e7d262647543f603d3827f3c7" exitCode=0 Sep 29 13:47:09 crc kubenswrapper[4634]: I0929 13:47:09.882923 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a99d559-349e-4679-8139-0ed7a9617af7","Type":"ContainerDied","Data":"d59fd481f0985d43717083f312ec82194b310d7e7d262647543f603d3827f3c7"} Sep 29 13:47:10 crc kubenswrapper[4634]: I0929 13:47:10.416207 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:10 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:10 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:10 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:10 crc kubenswrapper[4634]: I0929 13:47:10.416638 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.168564 4634 
patch_prober.go:28] interesting pod/downloads-7954f5f757-kcrwt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.168604 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-kcrwt" podUID="cfca5304-d886-40b0-93ea-cb412ba053f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.168518 4634 patch_prober.go:28] interesting pod/downloads-7954f5f757-kcrwt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" start-of-body= Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.168721 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-kcrwt" podUID="cfca5304-d886-40b0-93ea-cb412ba053f9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.7:8080/\": dial tcp 10.217.0.7:8080: connect: connection refused" Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.416416 4634 patch_prober.go:28] interesting pod/console-f9d7485db-5nvq7 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.416462 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-5nvq7" podUID="95f9d479-e9b9-4086-8792-83625bfaff6e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.5:8443/health\": dial tcp 10.217.0.5:8443: connect: connection refused" Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.416528 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:11 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:11 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:11 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:11 crc kubenswrapper[4634]: I0929 13:47:11.416585 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:12 crc kubenswrapper[4634]: I0929 13:47:12.415702 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:12 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:12 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:12 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:12 crc kubenswrapper[4634]: I0929 13:47:12.415783 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" 
podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:13 crc kubenswrapper[4634]: I0929 13:47:13.427977 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:13 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:13 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:13 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:13 crc kubenswrapper[4634]: I0929 13:47:13.428032 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:14 crc kubenswrapper[4634]: I0929 13:47:14.397067 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:47:14 crc kubenswrapper[4634]: I0929 13:47:14.397142 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:47:14 crc kubenswrapper[4634]: I0929 13:47:14.414736 4634 patch_prober.go:28] interesting pod/router-default-5444994796-x2lkr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 13:47:14 crc kubenswrapper[4634]: [-]has-synced failed: reason withheld Sep 29 13:47:14 crc kubenswrapper[4634]: [+]process-running ok Sep 29 13:47:14 crc kubenswrapper[4634]: healthz check failed Sep 29 13:47:14 crc kubenswrapper[4634]: I0929 13:47:14.414810 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-x2lkr" podUID="4857752f-13d0-4996-b112-6410097f9c28" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 13:47:15 crc kubenswrapper[4634]: I0929 13:47:15.415004 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:47:15 crc kubenswrapper[4634]: I0929 13:47:15.418459 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-x2lkr" Sep 29 13:47:15 crc kubenswrapper[4634]: I0929 13:47:15.736665 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:47:15 crc kubenswrapper[4634]: I0929 13:47:15.744070 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/85c1b26c-a922-4d3c-934f-e6968735a76e-metrics-certs\") pod \"network-metrics-daemon-nl5xm\" (UID: \"85c1b26c-a922-4d3c-934f-e6968735a76e\") " pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:47:15 crc kubenswrapper[4634]: I0929 13:47:15.847073 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nl5xm" Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.124849 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.273216 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a99d559-349e-4679-8139-0ed7a9617af7-kube-api-access\") pod \"2a99d559-349e-4679-8139-0ed7a9617af7\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.273278 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a99d559-349e-4679-8139-0ed7a9617af7-kubelet-dir\") pod \"2a99d559-349e-4679-8139-0ed7a9617af7\" (UID: \"2a99d559-349e-4679-8139-0ed7a9617af7\") " Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.273389 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2a99d559-349e-4679-8139-0ed7a9617af7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2a99d559-349e-4679-8139-0ed7a9617af7" (UID: "2a99d559-349e-4679-8139-0ed7a9617af7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.273519 4634 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2a99d559-349e-4679-8139-0ed7a9617af7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.295919 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a99d559-349e-4679-8139-0ed7a9617af7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2a99d559-349e-4679-8139-0ed7a9617af7" (UID: "2a99d559-349e-4679-8139-0ed7a9617af7"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.375117 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2a99d559-349e-4679-8139-0ed7a9617af7-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.963745 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"2a99d559-349e-4679-8139-0ed7a9617af7","Type":"ContainerDied","Data":"31982298718cc1b239eb69b2d6dc7c4a3ab786d1607be1ae600e75220b5e6704"} Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.963781 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31982298718cc1b239eb69b2d6dc7c4a3ab786d1607be1ae600e75220b5e6704" Sep 29 13:47:18 crc kubenswrapper[4634]: I0929 13:47:18.963833 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 13:47:21 crc kubenswrapper[4634]: I0929 13:47:21.154129 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-kcrwt" Sep 29 13:47:21 crc kubenswrapper[4634]: I0929 13:47:21.419583 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:47:21 crc kubenswrapper[4634]: I0929 13:47:21.428711 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:47:22 crc kubenswrapper[4634]: I0929 13:47:22.006111 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:47:30 crc kubenswrapper[4634]: E0929 13:47:30.808970 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 13:47:30 crc kubenswrapper[4634]: E0929 13:47:30.809816 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gpfs9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7dc9w_openshift-marketplace(422b00d2-0df9-4778-8040-d2b175d8e67e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 13:47:30 crc kubenswrapper[4634]: E0929 13:47:30.812055 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7dc9w" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" Sep 29 13:47:31 crc kubenswrapper[4634]: I0929 13:47:31.827887 4634 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-jv7wv" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.254343 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7dc9w" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.330123 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.330460 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ffj5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-lx58l_openshift-marketplace(df83a2e1-7545-44da-91a3-72925fe5fd1b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.331654 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-lx58l" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.383109 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.383409 4634 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-62w8c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-q4lqj_openshift-marketplace(27ebe57d-6e97-49c8-8106-f4e6983b3e30): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.385966 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-q4lqj" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.400372 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.400500 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wr9ff,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gpvqs_openshift-marketplace(fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.400757 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.400807 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5kpct,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
redhat-operators-44875_openshift-marketplace(54680624-14fd-47bf-829b-1f0342ed7db1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.403146 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-44875" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" Sep 29 13:47:32 crc kubenswrapper[4634]: E0929 13:47:32.403188 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gpvqs" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" Sep 29 13:47:32 crc kubenswrapper[4634]: I0929 13:47:32.723023 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nl5xm"] Sep 29 13:47:32 crc kubenswrapper[4634]: W0929 13:47:32.733572 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85c1b26c_a922_4d3c_934f_e6968735a76e.slice/crio-acd245bc3291e85ef0b42e38fbee183b9d4c6d7a78221205f2bb312054e8e1bc WatchSource:0}: Error finding container acd245bc3291e85ef0b42e38fbee183b9d4c6d7a78221205f2bb312054e8e1bc: Status 404 returned error can't find the container with id acd245bc3291e85ef0b42e38fbee183b9d4c6d7a78221205f2bb312054e8e1bc Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.039063 4634 generic.go:334] "Generic (PLEG): container finished" podID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerID="972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae" exitCode=0 Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.039193 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnr2w" event={"ID":"e93988bf-f719-4ad5-a46c-73e3877b3e6b","Type":"ContainerDied","Data":"972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae"} Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.042796 4634 generic.go:334] "Generic (PLEG): container finished" podID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerID="f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef" exitCode=0 Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.042853 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nrlj" event={"ID":"bbd41564-d1da-427c-8bd6-6a2faea397cc","Type":"ContainerDied","Data":"f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef"} Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.044979 4634 generic.go:334] "Generic (PLEG): container finished" podID="48259401-fe1f-4b48-9574-af1c32bccce5" containerID="c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17" exitCode=0 Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.045287 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5892t" event={"ID":"48259401-fe1f-4b48-9574-af1c32bccce5","Type":"ContainerDied","Data":"c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17"} Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.047657 4634 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" event={"ID":"85c1b26c-a922-4d3c-934f-e6968735a76e","Type":"ContainerStarted","Data":"b0b45e66671982f7640130560639845eb9340939e0d0cd9dc5b535f213ae9157"} Sep 29 13:47:33 crc kubenswrapper[4634]: I0929 13:47:33.047675 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" event={"ID":"85c1b26c-a922-4d3c-934f-e6968735a76e","Type":"ContainerStarted","Data":"acd245bc3291e85ef0b42e38fbee183b9d4c6d7a78221205f2bb312054e8e1bc"} Sep 29 13:47:33 crc kubenswrapper[4634]: E0929 13:47:33.052301 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-44875" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" Sep 29 13:47:33 crc kubenswrapper[4634]: E0929 13:47:33.052643 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gpvqs" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" Sep 29 13:47:33 crc kubenswrapper[4634]: E0929 13:47:33.052767 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-q4lqj" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" Sep 29 13:47:33 crc kubenswrapper[4634]: E0929 13:47:33.053601 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-lx58l" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" Sep 29 13:47:34 crc kubenswrapper[4634]: I0929 13:47:34.055138 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nrlj" event={"ID":"bbd41564-d1da-427c-8bd6-6a2faea397cc","Type":"ContainerStarted","Data":"79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e"} Sep 29 13:47:34 crc kubenswrapper[4634]: I0929 13:47:34.057639 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5892t" event={"ID":"48259401-fe1f-4b48-9574-af1c32bccce5","Type":"ContainerStarted","Data":"383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff"} Sep 29 13:47:34 crc kubenswrapper[4634]: I0929 13:47:34.059416 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nl5xm" event={"ID":"85c1b26c-a922-4d3c-934f-e6968735a76e","Type":"ContainerStarted","Data":"2d43540885c6b158e8a9a968cdf368a6e6e4da6270db2d81eeac06a7500b89a5"} Sep 29 13:47:34 crc kubenswrapper[4634]: I0929 13:47:34.061444 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnr2w" event={"ID":"e93988bf-f719-4ad5-a46c-73e3877b3e6b","Type":"ContainerStarted","Data":"2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486"} Sep 29 13:47:34 crc kubenswrapper[4634]: I0929 13:47:34.099692 4634 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-6nrlj" podStartSLOduration=3.184904932 podStartE2EDuration="33.099674503s" podCreationTimestamp="2025-09-29 13:47:01 +0000 UTC" firstStartedPulling="2025-09-29 13:47:03.714843653 +0000 UTC m=+154.283571402" lastFinishedPulling="2025-09-29 13:47:33.629613224 +0000 UTC m=+184.198340973" observedRunningTime="2025-09-29 13:47:34.081051176 +0000 UTC m=+184.649778925" watchObservedRunningTime="2025-09-29 13:47:34.099674503 +0000 UTC m=+184.668402252" Sep 29 13:47:34 crc kubenswrapper[4634]: I0929 13:47:34.121207 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5892t" podStartSLOduration=3.2044002 podStartE2EDuration="34.121191911s" podCreationTimestamp="2025-09-29 13:47:00 +0000 UTC" firstStartedPulling="2025-09-29 13:47:02.64766485 +0000 UTC m=+153.216392599" lastFinishedPulling="2025-09-29 13:47:33.564456551 +0000 UTC m=+184.133184310" observedRunningTime="2025-09-29 13:47:34.102920534 +0000 UTC m=+184.671648283" watchObservedRunningTime="2025-09-29 13:47:34.121191911 +0000 UTC m=+184.689919660" Sep 29 13:47:34 crc kubenswrapper[4634]: I0929 13:47:34.122178 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qnr2w" podStartSLOduration=5.183908395 podStartE2EDuration="37.122171338s" podCreationTimestamp="2025-09-29 13:46:57 +0000 UTC" firstStartedPulling="2025-09-29 13:47:01.50326319 +0000 UTC m=+152.071990939" lastFinishedPulling="2025-09-29 13:47:33.441526133 +0000 UTC m=+184.010253882" observedRunningTime="2025-09-29 13:47:34.119647879 +0000 UTC m=+184.688375628" watchObservedRunningTime="2025-09-29 13:47:34.122171338 +0000 UTC m=+184.690899087" Sep 29 13:47:38 crc kubenswrapper[4634]: I0929 13:47:38.150587 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 13:47:38 crc kubenswrapper[4634]: I0929 13:47:38.169036 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-nl5xm" podStartSLOduration=165.168980231 podStartE2EDuration="2m45.168980231s" podCreationTimestamp="2025-09-29 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:47:34.134921023 +0000 UTC m=+184.703648772" watchObservedRunningTime="2025-09-29 13:47:38.168980231 +0000 UTC m=+188.737707980" Sep 29 13:47:38 crc kubenswrapper[4634]: I0929 13:47:38.719197 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:47:38 crc kubenswrapper[4634]: I0929 13:47:38.719586 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:47:38 crc kubenswrapper[4634]: I0929 13:47:38.854597 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:47:39 crc kubenswrapper[4634]: I0929 13:47:39.141495 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:47:40 crc kubenswrapper[4634]: I0929 13:47:40.765428 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:40 crc kubenswrapper[4634]: I0929 
13:47:40.765515 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:40 crc kubenswrapper[4634]: I0929 13:47:40.836021 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:41 crc kubenswrapper[4634]: I0929 13:47:41.179730 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:41 crc kubenswrapper[4634]: I0929 13:47:41.602705 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5892t"] Sep 29 13:47:42 crc kubenswrapper[4634]: I0929 13:47:42.213773 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:42 crc kubenswrapper[4634]: I0929 13:47:42.213963 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:42 crc kubenswrapper[4634]: I0929 13:47:42.263773 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.118349 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5892t" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="registry-server" containerID="cri-o://383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff" gracePeriod=2 Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.175160 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.599851 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.628049 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-utilities\") pod \"48259401-fe1f-4b48-9574-af1c32bccce5\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.628124 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-catalog-content\") pod \"48259401-fe1f-4b48-9574-af1c32bccce5\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.628171 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr9bx\" (UniqueName: \"kubernetes.io/projected/48259401-fe1f-4b48-9574-af1c32bccce5-kube-api-access-gr9bx\") pod \"48259401-fe1f-4b48-9574-af1c32bccce5\" (UID: \"48259401-fe1f-4b48-9574-af1c32bccce5\") " Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.631296 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-utilities" (OuterVolumeSpecName: "utilities") pod "48259401-fe1f-4b48-9574-af1c32bccce5" (UID: "48259401-fe1f-4b48-9574-af1c32bccce5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.642261 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48259401-fe1f-4b48-9574-af1c32bccce5-kube-api-access-gr9bx" (OuterVolumeSpecName: "kube-api-access-gr9bx") pod "48259401-fe1f-4b48-9574-af1c32bccce5" (UID: "48259401-fe1f-4b48-9574-af1c32bccce5"). InnerVolumeSpecName "kube-api-access-gr9bx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.654938 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48259401-fe1f-4b48-9574-af1c32bccce5" (UID: "48259401-fe1f-4b48-9574-af1c32bccce5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.729721 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.729760 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48259401-fe1f-4b48-9574-af1c32bccce5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:43 crc kubenswrapper[4634]: I0929 13:47:43.729777 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr9bx\" (UniqueName: \"kubernetes.io/projected/48259401-fe1f-4b48-9574-af1c32bccce5-kube-api-access-gr9bx\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.151606 4634 generic.go:334] "Generic (PLEG): container finished" podID="48259401-fe1f-4b48-9574-af1c32bccce5" containerID="383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff" exitCode=0 Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.151716 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5892t" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.151727 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5892t" event={"ID":"48259401-fe1f-4b48-9574-af1c32bccce5","Type":"ContainerDied","Data":"383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff"} Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.151813 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5892t" event={"ID":"48259401-fe1f-4b48-9574-af1c32bccce5","Type":"ContainerDied","Data":"e9b138d8bf22f4fedf480cc01b18b49aadcd86ebc07228bedd5e3afbeec42ab1"} Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.151848 4634 scope.go:117] "RemoveContainer" containerID="383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.184776 4634 scope.go:117] "RemoveContainer" containerID="c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.185534 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5892t"] Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.189151 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5892t"] Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.208883 4634 scope.go:117] "RemoveContainer" containerID="907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.226469 4634 scope.go:117] "RemoveContainer" containerID="383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff" Sep 29 13:47:44 crc kubenswrapper[4634]: E0929 13:47:44.227032 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff\": container with ID starting with 383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff not found: ID does not exist" containerID="383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.227098 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff"} err="failed to get container status \"383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff\": rpc error: code = NotFound desc = could not find container \"383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff\": container with ID starting with 383ed4e0161a338cad460ac1b9d70b7f39fabe5ef486c626219c6b41e7116fff not found: ID does not exist" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.227155 4634 scope.go:117] "RemoveContainer" containerID="c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17" Sep 29 13:47:44 crc kubenswrapper[4634]: E0929 13:47:44.227558 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17\": container with ID starting with c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17 not found: ID does not exist" containerID="c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.227648 4634 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17"} err="failed to get container status \"c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17\": rpc error: code = NotFound desc = could not find container \"c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17\": container with ID starting with c4344b1e49d18e75e27aabe6769d40d47526ae43ebabb1fa47faa82750726b17 not found: ID does not exist" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.227724 4634 scope.go:117] "RemoveContainer" containerID="907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2" Sep 29 13:47:44 crc kubenswrapper[4634]: E0929 13:47:44.228124 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2\": container with ID starting with 907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2 not found: ID does not exist" containerID="907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.228177 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2"} err="failed to get container status \"907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2\": rpc error: code = NotFound desc = could not find container \"907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2\": container with ID starting with 907cdf528b7ce0f7b95e02d5164a54ea4521b97679ebde5a5a987e0efa5070f2 not found: ID does not exist" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.396716 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.396803 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:47:44 crc kubenswrapper[4634]: I0929 13:47:44.577537 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6nrlj"] Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.180147 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6nrlj" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="registry-server" containerID="cri-o://79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e" gracePeriod=2 Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.718965 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.862724 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-utilities\") pod \"bbd41564-d1da-427c-8bd6-6a2faea397cc\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.862766 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvthq\" (UniqueName: \"kubernetes.io/projected/bbd41564-d1da-427c-8bd6-6a2faea397cc-kube-api-access-fvthq\") pod \"bbd41564-d1da-427c-8bd6-6a2faea397cc\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.862848 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-catalog-content\") pod \"bbd41564-d1da-427c-8bd6-6a2faea397cc\" (UID: \"bbd41564-d1da-427c-8bd6-6a2faea397cc\") " Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.864239 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-utilities" (OuterVolumeSpecName: "utilities") pod "bbd41564-d1da-427c-8bd6-6a2faea397cc" (UID: "bbd41564-d1da-427c-8bd6-6a2faea397cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.879834 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbd41564-d1da-427c-8bd6-6a2faea397cc-kube-api-access-fvthq" (OuterVolumeSpecName: "kube-api-access-fvthq") pod "bbd41564-d1da-427c-8bd6-6a2faea397cc" (UID: "bbd41564-d1da-427c-8bd6-6a2faea397cc"). InnerVolumeSpecName "kube-api-access-fvthq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.938354 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bbd41564-d1da-427c-8bd6-6a2faea397cc" (UID: "bbd41564-d1da-427c-8bd6-6a2faea397cc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.964496 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.964537 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bbd41564-d1da-427c-8bd6-6a2faea397cc-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:45 crc kubenswrapper[4634]: I0929 13:47:45.964553 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvthq\" (UniqueName: \"kubernetes.io/projected/bbd41564-d1da-427c-8bd6-6a2faea397cc-kube-api-access-fvthq\") on node \"crc\" DevicePath \"\"" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.117760 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" path="/var/lib/kubelet/pods/48259401-fe1f-4b48-9574-af1c32bccce5/volumes" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.187184 4634 generic.go:334] "Generic (PLEG): container finished" podID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerID="79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e" exitCode=0 Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.187310 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nrlj" event={"ID":"bbd41564-d1da-427c-8bd6-6a2faea397cc","Type":"ContainerDied","Data":"79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e"} Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.187342 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6nrlj" event={"ID":"bbd41564-d1da-427c-8bd6-6a2faea397cc","Type":"ContainerDied","Data":"a3802097b52f4966f88650557b9ab00679502908120f4d9d6c76ef4ff5ec4234"} Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.187364 4634 scope.go:117] "RemoveContainer" containerID="79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.187469 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6nrlj" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.190223 4634 generic.go:334] "Generic (PLEG): container finished" podID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerID="633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400" exitCode=0 Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.190252 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx58l" event={"ID":"df83a2e1-7545-44da-91a3-72925fe5fd1b","Type":"ContainerDied","Data":"633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400"} Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.208561 4634 scope.go:117] "RemoveContainer" containerID="f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.228585 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6nrlj"] Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.232025 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6nrlj"] Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.239648 4634 scope.go:117] "RemoveContainer" containerID="aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.256363 4634 scope.go:117] "RemoveContainer" containerID="79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e" Sep 29 13:47:46 crc kubenswrapper[4634]: E0929 13:47:46.256885 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e\": container with ID starting with 79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e not found: ID does not exist" containerID="79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.256928 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e"} err="failed to get container status \"79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e\": rpc error: code = NotFound desc = could not find container \"79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e\": container with ID starting with 79501da31e4e9ded05f9396ed7fcee5adfbf826203537883b28eb13ec6abdd7e not found: ID does not exist" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.256956 4634 scope.go:117] "RemoveContainer" containerID="f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef" Sep 29 13:47:46 crc kubenswrapper[4634]: E0929 13:47:46.257351 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef\": container with ID starting with f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef not found: ID does not exist" containerID="f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.257388 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef"} err="failed to get container status 
\"f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef\": rpc error: code = NotFound desc = could not find container \"f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef\": container with ID starting with f652df8a26e3d9138bda6192ab6f236b4939af1e2856387d7e06ad2861daf1ef not found: ID does not exist" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.257416 4634 scope.go:117] "RemoveContainer" containerID="aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc" Sep 29 13:47:46 crc kubenswrapper[4634]: E0929 13:47:46.257793 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc\": container with ID starting with aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc not found: ID does not exist" containerID="aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc" Sep 29 13:47:46 crc kubenswrapper[4634]: I0929 13:47:46.257839 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc"} err="failed to get container status \"aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc\": rpc error: code = NotFound desc = could not find container \"aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc\": container with ID starting with aa1e528cdd32763c95948a8e1f86833e33f4290530552e6747411aeb94112cbc not found: ID does not exist" Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.197172 4634 generic.go:334] "Generic (PLEG): container finished" podID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerID="e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507" exitCode=0 Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.197266 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4lqj" event={"ID":"27ebe57d-6e97-49c8-8106-f4e6983b3e30","Type":"ContainerDied","Data":"e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507"} Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.204253 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44875" event={"ID":"54680624-14fd-47bf-829b-1f0342ed7db1","Type":"ContainerStarted","Data":"740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0"} Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.208930 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx58l" event={"ID":"df83a2e1-7545-44da-91a3-72925fe5fd1b","Type":"ContainerStarted","Data":"b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a"} Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.228161 4634 generic.go:334] "Generic (PLEG): container finished" podID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerID="21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe" exitCode=0 Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.228263 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dc9w" event={"ID":"422b00d2-0df9-4778-8040-d2b175d8e67e","Type":"ContainerDied","Data":"21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe"} Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.238302 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gpvqs" 
event={"ID":"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8","Type":"ContainerStarted","Data":"2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379"} Sep 29 13:47:47 crc kubenswrapper[4634]: I0929 13:47:47.283520 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lx58l" podStartSLOduration=4.202956056 podStartE2EDuration="49.283502823s" podCreationTimestamp="2025-09-29 13:46:58 +0000 UTC" firstStartedPulling="2025-09-29 13:47:01.539914519 +0000 UTC m=+152.108642268" lastFinishedPulling="2025-09-29 13:47:46.620461246 +0000 UTC m=+197.189189035" observedRunningTime="2025-09-29 13:47:47.280728545 +0000 UTC m=+197.849456304" watchObservedRunningTime="2025-09-29 13:47:47.283502823 +0000 UTC m=+197.852230572" Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.117783 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" path="/var/lib/kubelet/pods/bbd41564-d1da-427c-8bd6-6a2faea397cc/volumes" Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.254033 4634 generic.go:334] "Generic (PLEG): container finished" podID="54680624-14fd-47bf-829b-1f0342ed7db1" containerID="740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0" exitCode=0 Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.254117 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44875" event={"ID":"54680624-14fd-47bf-829b-1f0342ed7db1","Type":"ContainerDied","Data":"740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0"} Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.257624 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dc9w" event={"ID":"422b00d2-0df9-4778-8040-d2b175d8e67e","Type":"ContainerStarted","Data":"2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a"} Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.260376 4634 generic.go:334] "Generic (PLEG): container finished" podID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerID="2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379" exitCode=0 Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.260417 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gpvqs" event={"ID":"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8","Type":"ContainerDied","Data":"2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379"} Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.263870 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4lqj" event={"ID":"27ebe57d-6e97-49c8-8106-f4e6983b3e30","Type":"ContainerStarted","Data":"aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee"} Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.291389 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q4lqj" podStartSLOduration=5.189710336 podStartE2EDuration="51.291370456s" podCreationTimestamp="2025-09-29 13:46:57 +0000 UTC" firstStartedPulling="2025-09-29 13:47:01.522156345 +0000 UTC m=+152.090884084" lastFinishedPulling="2025-09-29 13:47:47.623816445 +0000 UTC m=+198.192544204" observedRunningTime="2025-09-29 13:47:48.289959707 +0000 UTC m=+198.858687456" watchObservedRunningTime="2025-09-29 13:47:48.291370456 +0000 UTC m=+198.860098205" Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.323278 4634 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7dc9w" podStartSLOduration=4.211658171 podStartE2EDuration="49.323258703s" podCreationTimestamp="2025-09-29 13:46:59 +0000 UTC" firstStartedPulling="2025-09-29 13:47:02.580827451 +0000 UTC m=+153.149555200" lastFinishedPulling="2025-09-29 13:47:47.692427983 +0000 UTC m=+198.261155732" observedRunningTime="2025-09-29 13:47:48.32098316 +0000 UTC m=+198.889710899" watchObservedRunningTime="2025-09-29 13:47:48.323258703 +0000 UTC m=+198.891986452" Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.660476 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.660833 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:47:48 crc kubenswrapper[4634]: I0929 13:47:48.705525 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:47:49 crc kubenswrapper[4634]: I0929 13:47:49.276502 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gpvqs" event={"ID":"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8","Type":"ContainerStarted","Data":"71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c"} Sep 29 13:47:49 crc kubenswrapper[4634]: I0929 13:47:49.280383 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44875" event={"ID":"54680624-14fd-47bf-829b-1f0342ed7db1","Type":"ContainerStarted","Data":"9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223"} Sep 29 13:47:49 crc kubenswrapper[4634]: I0929 13:47:49.317217 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-44875" podStartSLOduration=4.280385868 podStartE2EDuration="49.31719449s" podCreationTimestamp="2025-09-29 13:47:00 +0000 UTC" firstStartedPulling="2025-09-29 13:47:03.681611859 +0000 UTC m=+154.250339608" lastFinishedPulling="2025-09-29 13:47:48.71842048 +0000 UTC m=+199.287148230" observedRunningTime="2025-09-29 13:47:49.313712492 +0000 UTC m=+199.882440231" watchObservedRunningTime="2025-09-29 13:47:49.31719449 +0000 UTC m=+199.885922249" Sep 29 13:47:49 crc kubenswrapper[4634]: I0929 13:47:49.317884 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gpvqs" podStartSLOduration=4.230999585 podStartE2EDuration="51.317878488s" podCreationTimestamp="2025-09-29 13:46:58 +0000 UTC" firstStartedPulling="2025-09-29 13:47:01.5733778 +0000 UTC m=+152.142105549" lastFinishedPulling="2025-09-29 13:47:48.660256703 +0000 UTC m=+199.228984452" observedRunningTime="2025-09-29 13:47:49.298669144 +0000 UTC m=+199.867396883" watchObservedRunningTime="2025-09-29 13:47:49.317878488 +0000 UTC m=+199.886606247" Sep 29 13:47:50 crc kubenswrapper[4634]: I0929 13:47:50.309957 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:50 crc kubenswrapper[4634]: I0929 13:47:50.310267 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:50 crc kubenswrapper[4634]: I0929 13:47:50.349167 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:47:52 crc kubenswrapper[4634]: I0929 13:47:52.219345 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:52 crc kubenswrapper[4634]: I0929 13:47:52.219952 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:47:53 crc kubenswrapper[4634]: I0929 13:47:53.269287 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-44875" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="registry-server" probeResult="failure" output=< Sep 29 13:47:53 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 13:47:53 crc kubenswrapper[4634]: > Sep 29 13:47:56 crc kubenswrapper[4634]: I0929 13:47:56.776414 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vvk4f"] Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.235198 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.235747 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.275580 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.388440 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.698020 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.726229 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.726277 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:47:58 crc kubenswrapper[4634]: I0929 13:47:58.759008 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:47:59 crc kubenswrapper[4634]: I0929 13:47:59.394193 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:47:59 crc kubenswrapper[4634]: I0929 13:47:59.722554 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lx58l"] Sep 29 13:47:59 crc kubenswrapper[4634]: I0929 13:47:59.722789 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lx58l" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="registry-server" containerID="cri-o://b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a" gracePeriod=2 Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.069459 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.161967 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffj5t\" (UniqueName: \"kubernetes.io/projected/df83a2e1-7545-44da-91a3-72925fe5fd1b-kube-api-access-ffj5t\") pod \"df83a2e1-7545-44da-91a3-72925fe5fd1b\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.162008 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-catalog-content\") pod \"df83a2e1-7545-44da-91a3-72925fe5fd1b\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.162056 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-utilities\") pod \"df83a2e1-7545-44da-91a3-72925fe5fd1b\" (UID: \"df83a2e1-7545-44da-91a3-72925fe5fd1b\") " Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.163455 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-utilities" (OuterVolumeSpecName: "utilities") pod "df83a2e1-7545-44da-91a3-72925fe5fd1b" (UID: "df83a2e1-7545-44da-91a3-72925fe5fd1b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.173041 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df83a2e1-7545-44da-91a3-72925fe5fd1b-kube-api-access-ffj5t" (OuterVolumeSpecName: "kube-api-access-ffj5t") pod "df83a2e1-7545-44da-91a3-72925fe5fd1b" (UID: "df83a2e1-7545-44da-91a3-72925fe5fd1b"). InnerVolumeSpecName "kube-api-access-ffj5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.216212 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df83a2e1-7545-44da-91a3-72925fe5fd1b" (UID: "df83a2e1-7545-44da-91a3-72925fe5fd1b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.262679 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.262946 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffj5t\" (UniqueName: \"kubernetes.io/projected/df83a2e1-7545-44da-91a3-72925fe5fd1b-kube-api-access-ffj5t\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.263022 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df83a2e1-7545-44da-91a3-72925fe5fd1b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.359868 4634 generic.go:334] "Generic (PLEG): container finished" podID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerID="b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a" exitCode=0 Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.360129 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx58l" event={"ID":"df83a2e1-7545-44da-91a3-72925fe5fd1b","Type":"ContainerDied","Data":"b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a"} Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.360623 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx58l" event={"ID":"df83a2e1-7545-44da-91a3-72925fe5fd1b","Type":"ContainerDied","Data":"dd0eab8e5e3a49ee6f6851ad035f8c3b6cb7c4d4095b8e7ebe9b91b904b8760d"} Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.360647 4634 scope.go:117] "RemoveContainer" containerID="b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.360222 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lx58l" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.366634 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.375566 4634 scope.go:117] "RemoveContainer" containerID="633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.399667 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lx58l"] Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.402280 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lx58l"] Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.456312 4634 scope.go:117] "RemoveContainer" containerID="52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.469049 4634 scope.go:117] "RemoveContainer" containerID="b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a" Sep 29 13:48:00 crc kubenswrapper[4634]: E0929 13:48:00.469609 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a\": container with ID starting with b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a not found: ID does not exist" containerID="b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.469641 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a"} err="failed to get container status \"b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a\": rpc error: code = NotFound desc = could not find container \"b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a\": container with ID starting with b2e926000b6a4431576345884ba24edd233f93b647d36efa3713a1185a555e1a not found: ID does not exist" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.469663 4634 scope.go:117] "RemoveContainer" containerID="633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400" Sep 29 13:48:00 crc kubenswrapper[4634]: E0929 13:48:00.470222 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400\": container with ID starting with 633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400 not found: ID does not exist" containerID="633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.470340 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400"} err="failed to get container status \"633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400\": rpc error: code = NotFound desc = could not find container \"633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400\": container with ID starting with 633ccc93bc0278a4f3641d9cdbc40e45e3a68195958a4f7b0b156de2fe0e6400 not found: ID does not exist" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.470419 4634 scope.go:117] "RemoveContainer" 
containerID="52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019" Sep 29 13:48:00 crc kubenswrapper[4634]: E0929 13:48:00.470665 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019\": container with ID starting with 52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019 not found: ID does not exist" containerID="52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019" Sep 29 13:48:00 crc kubenswrapper[4634]: I0929 13:48:00.470742 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019"} err="failed to get container status \"52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019\": rpc error: code = NotFound desc = could not find container \"52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019\": container with ID starting with 52b93099a7492b99cd60297141c16bfc92774d29de3bb0fc5be0ce8776538019 not found: ID does not exist" Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.127705 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gpvqs"] Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.366609 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gpvqs" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="registry-server" containerID="cri-o://71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c" gracePeriod=2 Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.713952 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.900342 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-utilities\") pod \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.900402 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wr9ff\" (UniqueName: \"kubernetes.io/projected/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-kube-api-access-wr9ff\") pod \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.900430 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-catalog-content\") pod \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\" (UID: \"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8\") " Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.901252 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-utilities" (OuterVolumeSpecName: "utilities") pod "fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" (UID: "fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.908303 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-kube-api-access-wr9ff" (OuterVolumeSpecName: "kube-api-access-wr9ff") pod "fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" (UID: "fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8"). InnerVolumeSpecName "kube-api-access-wr9ff". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:48:01 crc kubenswrapper[4634]: I0929 13:48:01.944312 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" (UID: "fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.006821 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.006861 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wr9ff\" (UniqueName: \"kubernetes.io/projected/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-kube-api-access-wr9ff\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.006871 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.115245 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" path="/var/lib/kubelet/pods/df83a2e1-7545-44da-91a3-72925fe5fd1b/volumes" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.262642 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.298629 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.372779 4634 generic.go:334] "Generic (PLEG): container finished" podID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerID="71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c" exitCode=0 Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.373494 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gpvqs" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.373807 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gpvqs" event={"ID":"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8","Type":"ContainerDied","Data":"71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c"} Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.373875 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gpvqs" event={"ID":"fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8","Type":"ContainerDied","Data":"c28494fbbf49f0ab48ec4b9828a2fe59d3c0f2125916466565693961bad4c869"} Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.373900 4634 scope.go:117] "RemoveContainer" containerID="71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.391133 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gpvqs"] Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.395012 4634 scope.go:117] "RemoveContainer" containerID="2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.396185 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gpvqs"] Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.427409 4634 scope.go:117] "RemoveContainer" containerID="d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.442650 4634 scope.go:117] "RemoveContainer" containerID="71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c" Sep 29 13:48:02 crc kubenswrapper[4634]: E0929 13:48:02.443164 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c\": container with ID starting with 71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c not found: ID does not exist" containerID="71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.443196 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c"} err="failed to get container status \"71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c\": rpc error: code = NotFound desc = could not find container \"71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c\": container with ID starting with 71229c3a2b1e0e0ff5280289e106a064135fb4d431bf47bd98fa3f7ce409392c not found: ID does not exist" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.443228 4634 scope.go:117] "RemoveContainer" containerID="2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379" Sep 29 13:48:02 crc kubenswrapper[4634]: E0929 13:48:02.443658 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379\": container with ID starting with 2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379 not found: ID does not exist" containerID="2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.443678 4634 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379"} err="failed to get container status \"2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379\": rpc error: code = NotFound desc = could not find container \"2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379\": container with ID starting with 2d8a08d96eb12c544218bae4a805a4eaae440ccf12a55368bebd5f7227dbb379 not found: ID does not exist" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.443689 4634 scope.go:117] "RemoveContainer" containerID="d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b" Sep 29 13:48:02 crc kubenswrapper[4634]: E0929 13:48:02.443953 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b\": container with ID starting with d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b not found: ID does not exist" containerID="d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b" Sep 29 13:48:02 crc kubenswrapper[4634]: I0929 13:48:02.443991 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b"} err="failed to get container status \"d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b\": rpc error: code = NotFound desc = could not find container \"d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b\": container with ID starting with d2ada29c77d5848cc2649265b6126219c13275af512e2c7358dc24861b304a7b not found: ID does not exist" Sep 29 13:48:04 crc kubenswrapper[4634]: I0929 13:48:04.125760 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" path="/var/lib/kubelet/pods/fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8/volumes" Sep 29 13:48:14 crc kubenswrapper[4634]: I0929 13:48:14.395906 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:48:14 crc kubenswrapper[4634]: I0929 13:48:14.396889 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:48:14 crc kubenswrapper[4634]: I0929 13:48:14.396965 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:48:14 crc kubenswrapper[4634]: I0929 13:48:14.397887 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:48:14 crc kubenswrapper[4634]: I0929 13:48:14.397996 4634 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd" gracePeriod=600 Sep 29 13:48:15 crc kubenswrapper[4634]: I0929 13:48:15.440545 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd" exitCode=0 Sep 29 13:48:15 crc kubenswrapper[4634]: I0929 13:48:15.440620 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd"} Sep 29 13:48:15 crc kubenswrapper[4634]: I0929 13:48:15.440925 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"9dabd1bf385841cebfd86dae33dc8ce33877bc83fd06ad816398ab585db5694d"} Sep 29 13:48:21 crc kubenswrapper[4634]: I0929 13:48:21.795329 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" podUID="172d0968-1bd7-48d4-9bcd-62590bead86c" containerName="oauth-openshift" containerID="cri-o://d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b" gracePeriod=15 Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.208818 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.245402 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7b964c775c-jxrj4"] Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248109 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="extract-utilities" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248132 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="extract-utilities" Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248145 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="extract-utilities" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248154 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="extract-utilities" Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248162 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="extract-utilities" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248170 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="extract-utilities" Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248180 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="registry-server" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248195 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="registry-server" 
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248206 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248294 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248305 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="172d0968-1bd7-48d4-9bcd-62590bead86c" containerName="oauth-openshift"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248312 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="172d0968-1bd7-48d4-9bcd-62590bead86c" containerName="oauth-openshift"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248322 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248329 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248354 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248362 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248374 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19da7e0e-5b6f-4b46-8998-a0f78da0289f" containerName="pruner"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248383 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="19da7e0e-5b6f-4b46-8998-a0f78da0289f" containerName="pruner"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248391 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248397 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248407 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248415 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248435 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="extract-utilities"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248442 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="extract-utilities"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248452 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a99d559-349e-4679-8139-0ed7a9617af7" containerName="pruner"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248459 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a99d559-349e-4679-8139-0ed7a9617af7" containerName="pruner"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248488 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248496 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="extract-content"
Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.248509 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248517 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248632 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="172d0968-1bd7-48d4-9bcd-62590bead86c" containerName="oauth-openshift"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248642 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a99d559-349e-4679-8139-0ed7a9617af7" containerName="pruner"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248651 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="df83a2e1-7545-44da-91a3-72925fe5fd1b" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248659 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbd41564-d1da-427c-8bd6-6a2faea397cc" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248672 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="48259401-fe1f-4b48-9574-af1c32bccce5" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248687 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="19da7e0e-5b6f-4b46-8998-a0f78da0289f" containerName="pruner"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.248698 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc80b96a-5e2d-4194-8d8c-fb0bf22a58c8" containerName="registry-server"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.249438 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.255353 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7b964c775c-jxrj4"]
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.363749 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-cliconfig\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.363826 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-policies\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.363855 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-ocp-branding-template\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.364550 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.364566 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365077 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-router-certs\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365173 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-session\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365518 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-trusted-ca-bundle\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365561 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-login\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365590 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-serving-cert\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365630 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-provider-selection\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365657 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-dir\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365687 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-idp-0-file-data\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365715 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg584\" (UniqueName: \"kubernetes.io/projected/172d0968-1bd7-48d4-9bcd-62590bead86c-kube-api-access-tg584\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365778 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-error\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365808 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-service-ca\") pod \"172d0968-1bd7-48d4-9bcd-62590bead86c\" (UID: \"172d0968-1bd7-48d4-9bcd-62590bead86c\") "
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.365886 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkg59\" (UniqueName: \"kubernetes.io/projected/4a12d3f0-e899-491d-9748-6bfc079f1f2a-kube-api-access-fkg59\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366153 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-audit-policies\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366182 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-session\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366213 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366238 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a12d3f0-e899-491d-9748-6bfc079f1f2a-audit-dir\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366232 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366269 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366300 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366322 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-login\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366345 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-error\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366378 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366404 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366429 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366459 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366509 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4"
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366553 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366583 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366596 4634 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-policies\") on node \"crc\" DevicePath \"\""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.366869 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.367496 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.369922 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/172d0968-1bd7-48d4-9bcd-62590bead86c-kube-api-access-tg584" (OuterVolumeSpecName: "kube-api-access-tg584") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "kube-api-access-tg584". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.370381 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.370851 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.371388 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.371629 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.371843 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.371994 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.372400 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.378296 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "172d0968-1bd7-48d4-9bcd-62590bead86c" (UID: "172d0968-1bd7-48d4-9bcd-62590bead86c"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467713 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-session\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467763 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-audit-policies\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467797 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467826 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a12d3f0-e899-491d-9748-6bfc079f1f2a-audit-dir\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467850 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467874 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467895 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-login\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467916 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-error\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 
29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467945 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467970 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.467993 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468023 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468067 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468121 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkg59\" (UniqueName: \"kubernetes.io/projected/4a12d3f0-e899-491d-9748-6bfc079f1f2a-kube-api-access-fkg59\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468176 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468193 4634 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/172d0968-1bd7-48d4-9bcd-62590bead86c-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468275 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 
13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468813 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4a12d3f0-e899-491d-9748-6bfc079f1f2a-audit-dir\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.468955 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.469246 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-audit-policies\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.469571 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.470074 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.471985 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.472000 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.472022 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tg584\" (UniqueName: \"kubernetes.io/projected/172d0968-1bd7-48d4-9bcd-62590bead86c-kube-api-access-tg584\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473051 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-error\") on node \"crc\" DevicePath 
\"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473069 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.472581 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-error\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.472650 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.472818 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-login\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473110 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473163 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473181 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473198 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.472675 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-session\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473211 4634 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/172d0968-1bd7-48d4-9bcd-62590bead86c-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" 
Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.473919 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.475114 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4a12d3f0-e899-491d-9748-6bfc079f1f2a-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.485402 4634 generic.go:334] "Generic (PLEG): container finished" podID="172d0968-1bd7-48d4-9bcd-62590bead86c" containerID="d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b" exitCode=0 Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.485448 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" event={"ID":"172d0968-1bd7-48d4-9bcd-62590bead86c","Type":"ContainerDied","Data":"d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b"} Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.485467 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.485713 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-vvk4f" event={"ID":"172d0968-1bd7-48d4-9bcd-62590bead86c","Type":"ContainerDied","Data":"4a8a619db538d8b1206ec95785d30be1ac210a419d3ba0134e752dba0d905288"} Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.485852 4634 scope.go:117] "RemoveContainer" containerID="d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.487848 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkg59\" (UniqueName: \"kubernetes.io/projected/4a12d3f0-e899-491d-9748-6bfc079f1f2a-kube-api-access-fkg59\") pod \"oauth-openshift-7b964c775c-jxrj4\" (UID: \"4a12d3f0-e899-491d-9748-6bfc079f1f2a\") " pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.510041 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vvk4f"] Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.513954 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-vvk4f"] Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.523885 4634 scope.go:117] "RemoveContainer" containerID="d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b" Sep 29 13:48:22 crc kubenswrapper[4634]: E0929 13:48:22.524417 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b\": container with ID starting with d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b not found: ID does not exist" 
containerID="d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.524556 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b"} err="failed to get container status \"d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b\": rpc error: code = NotFound desc = could not find container \"d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b\": container with ID starting with d5822046ea519f8bef8595d185dc77ca248f0f8f137d1a02539f77761afc3a3b not found: ID does not exist" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.576935 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:22 crc kubenswrapper[4634]: I0929 13:48:22.780793 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7b964c775c-jxrj4"] Sep 29 13:48:23 crc kubenswrapper[4634]: I0929 13:48:23.492447 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" event={"ID":"4a12d3f0-e899-491d-9748-6bfc079f1f2a","Type":"ContainerStarted","Data":"1ba4d2046e37950b5f7fdc602642017af6e4f34999480fb2005b72f44db194f7"} Sep 29 13:48:23 crc kubenswrapper[4634]: I0929 13:48:23.494026 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" event={"ID":"4a12d3f0-e899-491d-9748-6bfc079f1f2a","Type":"ContainerStarted","Data":"7803b381cda898b1a3e7da02512aa793f42f2b3ed080698e74db4d50b2da80eb"} Sep 29 13:48:23 crc kubenswrapper[4634]: I0929 13:48:23.494219 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:23 crc kubenswrapper[4634]: I0929 13:48:23.499805 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" Sep 29 13:48:23 crc kubenswrapper[4634]: I0929 13:48:23.522125 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7b964c775c-jxrj4" podStartSLOduration=27.522080235 podStartE2EDuration="27.522080235s" podCreationTimestamp="2025-09-29 13:47:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:48:23.51869878 +0000 UTC m=+234.087426559" watchObservedRunningTime="2025-09-29 13:48:23.522080235 +0000 UTC m=+234.090807994" Sep 29 13:48:24 crc kubenswrapper[4634]: I0929 13:48:24.116533 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="172d0968-1bd7-48d4-9bcd-62590bead86c" path="/var/lib/kubelet/pods/172d0968-1bd7-48d4-9bcd-62590bead86c/volumes" Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.922303 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnr2w"] Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.923102 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qnr2w" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="registry-server" containerID="cri-o://2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486" gracePeriod=30 Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 
13:48:49.934214 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q4lqj"] Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.934522 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q4lqj" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="registry-server" containerID="cri-o://aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee" gracePeriod=30 Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.945102 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9v7l5"] Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.945566 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerName="marketplace-operator" containerID="cri-o://eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823" gracePeriod=30 Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.967340 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dc9w"] Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.967760 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7dc9w" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="registry-server" containerID="cri-o://2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" gracePeriod=30 Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.992399 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-44875"] Sep 29 13:48:49 crc kubenswrapper[4634]: I0929 13:48:49.996518 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-44875" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="registry-server" containerID="cri-o://9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223" gracePeriod=30 Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.008473 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pcm5r"] Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.009749 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.020106 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pcm5r"] Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.114140 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.114223 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.114263 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8599n\" (UniqueName: \"kubernetes.io/projected/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-kube-api-access-8599n\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.215225 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.215607 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.215648 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8599n\" (UniqueName: \"kubernetes.io/projected/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-kube-api-access-8599n\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.217103 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.236318 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8599n\" (UniqueName: 
\"kubernetes.io/projected/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-kube-api-access-8599n\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.247743 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pcm5r\" (UID: \"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9\") " pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.294261 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.298515 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.311730 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a is running failed: container process not found" containerID="2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" cmd=["grpc_health_probe","-addr=:50051"] Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.313728 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a is running failed: container process not found" containerID="2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" cmd=["grpc_health_probe","-addr=:50051"] Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.314903 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a is running failed: container process not found" containerID="2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" cmd=["grpc_health_probe","-addr=:50051"] Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.314955 4634 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-7dc9w" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="registry-server" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.418924 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-catalog-content\") pod \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.418968 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62w8c\" (UniqueName: \"kubernetes.io/projected/27ebe57d-6e97-49c8-8106-f4e6983b3e30-kube-api-access-62w8c\") pod 
\"27ebe57d-6e97-49c8-8106-f4e6983b3e30\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.419003 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-utilities\") pod \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\" (UID: \"27ebe57d-6e97-49c8-8106-f4e6983b3e30\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.423295 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-utilities" (OuterVolumeSpecName: "utilities") pod "27ebe57d-6e97-49c8-8106-f4e6983b3e30" (UID: "27ebe57d-6e97-49c8-8106-f4e6983b3e30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.424605 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ebe57d-6e97-49c8-8106-f4e6983b3e30-kube-api-access-62w8c" (OuterVolumeSpecName: "kube-api-access-62w8c") pod "27ebe57d-6e97-49c8-8106-f4e6983b3e30" (UID: "27ebe57d-6e97-49c8-8106-f4e6983b3e30"). InnerVolumeSpecName "kube-api-access-62w8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.447630 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.507708 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.508059 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.509201 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.513431 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27ebe57d-6e97-49c8-8106-f4e6983b3e30" (UID: "27ebe57d-6e97-49c8-8106-f4e6983b3e30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.520827 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.520855 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ebe57d-6e97-49c8-8106-f4e6983b3e30-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.520868 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62w8c\" (UniqueName: \"kubernetes.io/projected/27ebe57d-6e97-49c8-8106-f4e6983b3e30-kube-api-access-62w8c\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621512 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-trusted-ca\") pod \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621580 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpnp7\" (UniqueName: \"kubernetes.io/projected/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-kube-api-access-dpnp7\") pod \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621635 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-catalog-content\") pod \"422b00d2-0df9-4778-8040-d2b175d8e67e\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621670 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-utilities\") pod \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621715 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpfs9\" (UniqueName: \"kubernetes.io/projected/422b00d2-0df9-4778-8040-d2b175d8e67e-kube-api-access-gpfs9\") pod \"422b00d2-0df9-4778-8040-d2b175d8e67e\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621739 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-catalog-content\") pod \"54680624-14fd-47bf-829b-1f0342ed7db1\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621764 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-utilities\") pod \"422b00d2-0df9-4778-8040-d2b175d8e67e\" (UID: \"422b00d2-0df9-4778-8040-d2b175d8e67e\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621783 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-utilities\") pod \"54680624-14fd-47bf-829b-1f0342ed7db1\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621819 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t88d9\" (UniqueName: \"kubernetes.io/projected/e93988bf-f719-4ad5-a46c-73e3877b3e6b-kube-api-access-t88d9\") pod \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621864 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-catalog-content\") pod \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\" (UID: \"e93988bf-f719-4ad5-a46c-73e3877b3e6b\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621906 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kpct\" (UniqueName: \"kubernetes.io/projected/54680624-14fd-47bf-829b-1f0342ed7db1-kube-api-access-5kpct\") pod \"54680624-14fd-47bf-829b-1f0342ed7db1\" (UID: \"54680624-14fd-47bf-829b-1f0342ed7db1\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.621937 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-operator-metrics\") pod \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\" (UID: \"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70\") " Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.622265 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" (UID: "f0bdb1e6-daf7-470b-ae19-248cbdf8ef70"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.623348 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-utilities" (OuterVolumeSpecName: "utilities") pod "422b00d2-0df9-4778-8040-d2b175d8e67e" (UID: "422b00d2-0df9-4778-8040-d2b175d8e67e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.623888 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-utilities" (OuterVolumeSpecName: "utilities") pod "54680624-14fd-47bf-829b-1f0342ed7db1" (UID: "54680624-14fd-47bf-829b-1f0342ed7db1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.626374 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-utilities" (OuterVolumeSpecName: "utilities") pod "e93988bf-f719-4ad5-a46c-73e3877b3e6b" (UID: "e93988bf-f719-4ad5-a46c-73e3877b3e6b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.627912 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54680624-14fd-47bf-829b-1f0342ed7db1-kube-api-access-5kpct" (OuterVolumeSpecName: "kube-api-access-5kpct") pod "54680624-14fd-47bf-829b-1f0342ed7db1" (UID: "54680624-14fd-47bf-829b-1f0342ed7db1"). InnerVolumeSpecName "kube-api-access-5kpct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.629678 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/422b00d2-0df9-4778-8040-d2b175d8e67e-kube-api-access-gpfs9" (OuterVolumeSpecName: "kube-api-access-gpfs9") pod "422b00d2-0df9-4778-8040-d2b175d8e67e" (UID: "422b00d2-0df9-4778-8040-d2b175d8e67e"). InnerVolumeSpecName "kube-api-access-gpfs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.631469 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-kube-api-access-dpnp7" (OuterVolumeSpecName: "kube-api-access-dpnp7") pod "f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" (UID: "f0bdb1e6-daf7-470b-ae19-248cbdf8ef70"). InnerVolumeSpecName "kube-api-access-dpnp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.631497 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e93988bf-f719-4ad5-a46c-73e3877b3e6b-kube-api-access-t88d9" (OuterVolumeSpecName: "kube-api-access-t88d9") pod "e93988bf-f719-4ad5-a46c-73e3877b3e6b" (UID: "e93988bf-f719-4ad5-a46c-73e3877b3e6b"). InnerVolumeSpecName "kube-api-access-t88d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.640217 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "422b00d2-0df9-4778-8040-d2b175d8e67e" (UID: "422b00d2-0df9-4778-8040-d2b175d8e67e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.642388 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" (UID: "f0bdb1e6-daf7-470b-ae19-248cbdf8ef70"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.666484 4634 generic.go:334] "Generic (PLEG): container finished" podID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerID="aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee" exitCode=0 Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.666548 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4lqj" event={"ID":"27ebe57d-6e97-49c8-8106-f4e6983b3e30","Type":"ContainerDied","Data":"aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.666576 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4lqj" event={"ID":"27ebe57d-6e97-49c8-8106-f4e6983b3e30","Type":"ContainerDied","Data":"28b5e9ff6ea6ee6497efcea7261ebc738f6128ec2b72b3abf4c6e58e3c34ebd8"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.666615 4634 scope.go:117] "RemoveContainer" containerID="aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.666739 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q4lqj" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.672441 4634 generic.go:334] "Generic (PLEG): container finished" podID="54680624-14fd-47bf-829b-1f0342ed7db1" containerID="9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223" exitCode=0 Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.672540 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44875" event={"ID":"54680624-14fd-47bf-829b-1f0342ed7db1","Type":"ContainerDied","Data":"9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.672570 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44875" event={"ID":"54680624-14fd-47bf-829b-1f0342ed7db1","Type":"ContainerDied","Data":"d5fe52fd4f98b61f7cff4c44514ed9139bbca8d35ffd68b6eef7d12d3d02916b"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.672627 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-44875" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.673815 4634 generic.go:334] "Generic (PLEG): container finished" podID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerID="eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823" exitCode=0 Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.673867 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" event={"ID":"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70","Type":"ContainerDied","Data":"eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.673890 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" event={"ID":"f0bdb1e6-daf7-470b-ae19-248cbdf8ef70","Type":"ContainerDied","Data":"f5a0bcde16799485fdf71d6e08f860f21c2ed253e5bc328dd0ba2c71f192a950"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.673917 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9v7l5" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.679374 4634 generic.go:334] "Generic (PLEG): container finished" podID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerID="2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" exitCode=0 Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.679417 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dc9w" event={"ID":"422b00d2-0df9-4778-8040-d2b175d8e67e","Type":"ContainerDied","Data":"2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.679756 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7dc9w" event={"ID":"422b00d2-0df9-4778-8040-d2b175d8e67e","Type":"ContainerDied","Data":"5b32827bf9582e31f660f79870ee15d12cab4f15af6820d27edcac1000f9c90f"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.679464 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7dc9w" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.681497 4634 generic.go:334] "Generic (PLEG): container finished" podID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerID="2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486" exitCode=0 Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.681535 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnr2w" event={"ID":"e93988bf-f719-4ad5-a46c-73e3877b3e6b","Type":"ContainerDied","Data":"2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.681551 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnr2w" event={"ID":"e93988bf-f719-4ad5-a46c-73e3877b3e6b","Type":"ContainerDied","Data":"ac94e66c23bdf42f7d98b66e279c66ca58ef3c1c2e496a920267fff53178b98e"} Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.681610 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnr2w" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.684703 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e93988bf-f719-4ad5-a46c-73e3877b3e6b" (UID: "e93988bf-f719-4ad5-a46c-73e3877b3e6b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.687404 4634 scope.go:117] "RemoveContainer" containerID="e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.701366 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q4lqj"] Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.704711 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q4lqj"] Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.707630 4634 scope.go:117] "RemoveContainer" containerID="f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.724975 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t88d9\" (UniqueName: \"kubernetes.io/projected/e93988bf-f719-4ad5-a46c-73e3877b3e6b-kube-api-access-t88d9\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725007 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725017 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kpct\" (UniqueName: \"kubernetes.io/projected/54680624-14fd-47bf-829b-1f0342ed7db1-kube-api-access-5kpct\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725026 4634 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725036 4634 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725045 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpnp7\" (UniqueName: \"kubernetes.io/projected/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70-kube-api-access-dpnp7\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725055 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725064 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e93988bf-f719-4ad5-a46c-73e3877b3e6b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725075 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpfs9\" (UniqueName: \"kubernetes.io/projected/422b00d2-0df9-4778-8040-d2b175d8e67e-kube-api-access-gpfs9\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725100 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/422b00d2-0df9-4778-8040-d2b175d8e67e-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc 
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.725108 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.731317 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dc9w"]
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.731815 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7dc9w"]
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.737704 4634 scope.go:117] "RemoveContainer" containerID="aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.739384 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee\": container with ID starting with aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee not found: ID does not exist" containerID="aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.739647 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee"} err="failed to get container status \"aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee\": rpc error: code = NotFound desc = could not find container \"aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee\": container with ID starting with aa5ed0baf8dd6f265c4bcaf1fae9b8f398cb4190574aca97366904f0648a87ee not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.739668 4634 scope.go:117] "RemoveContainer" containerID="e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.740485 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507\": container with ID starting with e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507 not found: ID does not exist" containerID="e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.740511 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507"} err="failed to get container status \"e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507\": rpc error: code = NotFound desc = could not find container \"e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507\": container with ID starting with e1154bc3af8d257b1f4b34fc07f60b75af9ed9bd8bea168c273c18191c470507 not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.740530 4634 scope.go:117] "RemoveContainer" containerID="f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.741070 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d\": container with ID starting with f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d not found: ID does not exist" containerID="f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.741142 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d"} err="failed to get container status \"f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d\": rpc error: code = NotFound desc = could not find container \"f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d\": container with ID starting with f46f7f19365441334ed3e0cf75622e2ccdd1404ca306a13f094b1e6cec41d05d not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.741185 4634 scope.go:117] "RemoveContainer" containerID="9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.748615 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9v7l5"]
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.752854 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9v7l5"]
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.759521 4634 scope.go:117] "RemoveContainer" containerID="740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0"
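The "DeleteContainer returned error ... NotFound" lines above are benign: the container was already removed by the runtime, cri-o answers the status query with gRPC NotFound, and the kubelet logs the error but proceeds, since deleting something already gone is effectively done. A sketch of that idiom using the real google.golang.org/grpc/status API (the surrounding code is a stand-in, not kubelet source):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // ignoreNotFound treats gRPC NotFound as success: removal is
    // idempotent, exactly the situation in the "could not find
    // container ... ID does not exist" lines above.
    func ignoreNotFound(err error) error {
        if status.Code(err) == codes.NotFound {
            return nil
        }
        return err
    }

    func main() {
        // Simulate the runtime's answer for an already-removed container.
        err := status.Errorf(codes.NotFound, "could not find container %q", "e1154bc3...")
        if err = ignoreNotFound(err); err != nil {
            fmt.Println("real failure:", err)
            return
        }
        fmt.Println("container already gone; nothing to do")
    }
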
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.773621 4634 scope.go:117] "RemoveContainer" containerID="970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.784599 4634 scope.go:117] "RemoveContainer" containerID="9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223" Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.784956 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223\": container with ID starting with 9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223 not found: ID does not exist" containerID="9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.784993 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223"} err="failed to get container status \"9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223\": rpc error: code = NotFound desc = could not find container \"9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223\": container with ID starting with 9d7c34c21761ef3a3e91f908740e8ec9929d96a8e1f3d497e5fcaad24b198223 not found: ID does not exist" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.785021 4634 scope.go:117] "RemoveContainer" containerID="740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0" Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.785428 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0\": container with ID starting with 740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0 not found: ID does not exist" containerID="740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.785455 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0"} err="failed to get container status \"740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0\": rpc error: code = NotFound desc = could not find container \"740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0\": container with ID starting with 740fde21dea9f375c074f3bb2a762576643ba0795dd8b951ed2f8b53d5b7ffc0 not found: ID does not exist" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.785476 4634 scope.go:117] "RemoveContainer" containerID="970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1" Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.785673 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1\": container with ID starting with 970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1 not found: ID does not exist" containerID="970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.785695 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1"} err="failed to get container status \"970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1\": rpc error: code = NotFound desc = could not find container \"970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1\": container with ID starting with 970eb3298717f49f4239e8d2e7e474d3aaf7f893bbc5fa4e623a44d828f53db1 not found: ID does not exist" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.785710 4634 scope.go:117] "RemoveContainer" containerID="eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.802369 4634 scope.go:117] "RemoveContainer" containerID="eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823" Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.803017 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823\": container with ID starting with eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823 not found: ID does not exist" containerID="eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.803061 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823"} err="failed to get container status \"eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823\": rpc error: code = NotFound desc = could not find container \"eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823\": container with ID starting with eefe79e655217e947128572ccc5652b44482c2beac158e156672c0bca2306823 not found: ID does not exist" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.803112 4634 scope.go:117] "RemoveContainer" containerID="2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.816593 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pcm5r"] Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.821908 4634 scope.go:117] "RemoveContainer" containerID="21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.827051 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54680624-14fd-47bf-829b-1f0342ed7db1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.834398 4634 scope.go:117] "RemoveContainer" containerID="882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e" Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.851511 4634 scope.go:117] "RemoveContainer" containerID="2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.851907 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a\": container with ID starting with 2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a not found: ID does not exist" containerID="2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a" Sep 29 13:48:50 crc kubenswrapper[4634]: 
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.851945 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a"} err="failed to get container status \"2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a\": rpc error: code = NotFound desc = could not find container \"2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a\": container with ID starting with 2cf09f5b1a5c46a3a1415571aeaaca71214e84240b872c20b8e5d236a3e2d87a not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.851976 4634 scope.go:117] "RemoveContainer" containerID="21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.852369 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe\": container with ID starting with 21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe not found: ID does not exist" containerID="21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.852405 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe"} err="failed to get container status \"21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe\": rpc error: code = NotFound desc = could not find container \"21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe\": container with ID starting with 21056aaaa23a6f910a2b0881248fce394b8dc9b5a2b9696470f06b3b15e14afe not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.852432 4634 scope.go:117] "RemoveContainer" containerID="882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.852737 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e\": container with ID starting with 882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e not found: ID does not exist" containerID="882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.852776 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e"} err="failed to get container status \"882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e\": rpc error: code = NotFound desc = could not find container \"882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e\": container with ID starting with 882b24d24272a3e29439caba3c89adf8c468b9e70b18de6e61ea028e0c93f01e not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.852804 4634 scope.go:117] "RemoveContainer" containerID="2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.864993 4634 scope.go:117] "RemoveContainer" containerID="972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.876041 4634 scope.go:117] "RemoveContainer" containerID="e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.891348 4634 scope.go:117] "RemoveContainer" containerID="2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.891679 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486\": container with ID starting with 2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486 not found: ID does not exist" containerID="2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.891706 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486"} err="failed to get container status \"2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486\": rpc error: code = NotFound desc = could not find container \"2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486\": container with ID starting with 2f71b0a6056c90a7fb7143afe761ad84e8e86edb051e21eaa35130462ae79486 not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.891727 4634 scope.go:117] "RemoveContainer" containerID="972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.891917 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae\": container with ID starting with 972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae not found: ID does not exist" containerID="972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.891941 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae"} err="failed to get container status \"972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae\": rpc error: code = NotFound desc = could not find container \"972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae\": container with ID starting with 972e045ee8d251629a71b37259245b4705ba4a10f7f586da42965459924737ae not found: ID does not exist"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.891956 4634 scope.go:117] "RemoveContainer" containerID="e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745"
Sep 29 13:48:50 crc kubenswrapper[4634]: E0929 13:48:50.892151 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745\": container with ID starting with e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745 not found: ID does not exist" containerID="e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745"
Sep 29 13:48:50 crc kubenswrapper[4634]: I0929 13:48:50.892169 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745"} err="failed to get container status \"e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745\": rpc error: code = NotFound desc = could not find container \"e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745\": container with ID starting with e51362df2cb32d65ea9bce845ae730253915b9c322d3088781357cf821464745 not found: ID does not exist"
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.000626 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-44875"]
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.002781 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-44875"]
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.008229 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnr2w"]
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.016257 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qnr2w"]
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.690332 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" event={"ID":"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9","Type":"ContainerStarted","Data":"5a65860fffc59660b2a566da555459933ff4c10002019d7a18e5e6d177285cdb"}
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.690610 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" event={"ID":"a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9","Type":"ContainerStarted","Data":"44f1bcba22ed37fc27826de3d371d7e9ec99246790042392c3c5b46197b7fa26"}
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.690629 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r"
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.693790 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r"
Sep 29 13:48:51 crc kubenswrapper[4634]: I0929 13:48:51.706158 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pcm5r" podStartSLOduration=2.706140859 podStartE2EDuration="2.706140859s" podCreationTimestamp="2025-09-29 13:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:48:51.703560402 +0000 UTC m=+262.272288151" watchObservedRunningTime="2025-09-29 13:48:51.706140859 +0000 UTC m=+262.274868608"
Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.119595 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" path="/var/lib/kubelet/pods/27ebe57d-6e97-49c8-8106-f4e6983b3e30/volumes"
Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.120291 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" path="/var/lib/kubelet/pods/422b00d2-0df9-4778-8040-d2b175d8e67e/volumes"
Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.120886 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" path="/var/lib/kubelet/pods/54680624-14fd-47bf-829b-1f0342ed7db1/volumes"
Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.121966 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" path="/var/lib/kubelet/pods/e93988bf-f719-4ad5-a46c-73e3877b3e6b/volumes"
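
The "Cleaned up orphaned pod volumes dir" lines are the kubelet's housekeeping pass deleting /var/lib/kubelet/pods/<uid>/volumes for pods that no longer exist on the node. A rough sketch of that scan; the active-pod set and root path are illustrative assumptions, not the kubelet's actual plumbing:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // cleanupOrphanedPodDirs removes volume dirs for pod UIDs no longer
    // known to the kubelet, mirroring kubelet_volumes.go's
    // "Cleaned up orphaned pod volumes dir" messages.
    func cleanupOrphanedPodDirs(root string, active map[string]bool) error {
        entries, err := os.ReadDir(root)
        if err != nil {
            return err
        }
        for _, e := range entries {
            uid := e.Name()
            if active[uid] {
                continue // pod still exists; leave its dirs alone
            }
            dir := filepath.Join(root, uid, "volumes")
            if err := os.RemoveAll(dir); err != nil {
                return err
            }
            fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n", uid, dir)
        }
        return nil
    }

    func main() {
        if err := cleanupOrphanedPodDirs("/var/lib/kubelet/pods", map[string]bool{}); err != nil {
            fmt.Println("cleanup failed:", err)
        }
    }
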
path="/var/lib/kubelet/pods/e93988bf-f719-4ad5-a46c-73e3877b3e6b/volumes" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.122658 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" path="/var/lib/kubelet/pods/f0bdb1e6-daf7-470b-ae19-248cbdf8ef70/volumes" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141602 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qkgkn"] Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141831 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerName="marketplace-operator" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141853 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerName="marketplace-operator" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141865 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141873 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141887 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141896 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141905 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141912 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141924 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141932 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141941 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141948 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141957 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.141966 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141978 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 
13:48:52.141985 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.141997 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142004 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="extract-utilities" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.142016 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142025 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.142036 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142046 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="extract-content" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.142060 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142069 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: E0929 13:48:52.142098 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142107 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142213 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0bdb1e6-daf7-470b-ae19-248cbdf8ef70" containerName="marketplace-operator" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142229 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="54680624-14fd-47bf-829b-1f0342ed7db1" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142244 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e93988bf-f719-4ad5-a46c-73e3877b3e6b" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142254 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ebe57d-6e97-49c8-8106-f4e6983b3e30" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.142264 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="422b00d2-0df9-4778-8040-d2b175d8e67e" containerName="registry-server" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.143015 4634 util.go:30] "No sandbox for pod can be found. 
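
The cpu_manager/state_mem and memory_manager pairs above show the resource managers dropping per-container CPU and memory assignments for the five just-deleted pods before the new catalog pods are admitted. A simplified sketch of that map cleanup; the types are illustrative, not the kubelet's state package:

    package main

    import "fmt"

    type assignmentKey struct{ podUID, container string }

    // removeStaleState drops assignments whose pod is no longer present,
    // the same bookkeeping behind "RemoveStaleState: removing container"
    // and "Deleted CPUSet assignment" above.
    func removeStaleState(assignments map[assignmentKey]string, alive map[string]bool) {
        for k := range assignments {
            if !alive[k.podUID] {
                delete(assignments, k) // deleting during range is safe in Go
                fmt.Printf("Deleted CPUSet assignment podUID=%q containerName=%q\n", k.podUID, k.container)
            }
        }
    }

    func main() {
        a := map[assignmentKey]string{
            {"422b00d2-0df9-4778-8040-d2b175d8e67e", "registry-server"}: "0-3",
        }
        removeStaleState(a, map[string]bool{})
    }
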
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.145989 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.156982 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qkgkn"] Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.243129 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1add5ad4-56e7-4d54-aaab-a74664b398ff-catalog-content\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.243200 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1add5ad4-56e7-4d54-aaab-a74664b398ff-utilities\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.243726 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d44jc\" (UniqueName: \"kubernetes.io/projected/1add5ad4-56e7-4d54-aaab-a74664b398ff-kube-api-access-d44jc\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.343284 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tw5l9"] Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.344213 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.345826 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d44jc\" (UniqueName: \"kubernetes.io/projected/1add5ad4-56e7-4d54-aaab-a74664b398ff-kube-api-access-d44jc\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.345912 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1add5ad4-56e7-4d54-aaab-a74664b398ff-catalog-content\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.345941 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1add5ad4-56e7-4d54-aaab-a74664b398ff-utilities\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.346516 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1add5ad4-56e7-4d54-aaab-a74664b398ff-utilities\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.346529 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1add5ad4-56e7-4d54-aaab-a74664b398ff-catalog-content\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.347117 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.352316 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tw5l9"] Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.368915 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d44jc\" (UniqueName: \"kubernetes.io/projected/1add5ad4-56e7-4d54-aaab-a74664b398ff-kube-api-access-d44jc\") pod \"redhat-marketplace-qkgkn\" (UID: \"1add5ad4-56e7-4d54-aaab-a74664b398ff\") " pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.447274 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-catalog-content\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.447341 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-utilities\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") 
" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.447374 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb57n\" (UniqueName: \"kubernetes.io/projected/df7b8866-1de8-4b2e-a301-591d5abfd01a-kube-api-access-rb57n\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.465978 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.548512 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-catalog-content\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.548842 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-utilities\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.548877 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb57n\" (UniqueName: \"kubernetes.io/projected/df7b8866-1de8-4b2e-a301-591d5abfd01a-kube-api-access-rb57n\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.549438 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-catalog-content\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.549551 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-utilities\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.570045 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb57n\" (UniqueName: \"kubernetes.io/projected/df7b8866-1de8-4b2e-a301-591d5abfd01a-kube-api-access-rb57n\") pod \"certified-operators-tw5l9\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.665801 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:48:52 crc kubenswrapper[4634]: I0929 13:48:52.848908 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qkgkn"] Sep 29 13:48:53 crc kubenswrapper[4634]: I0929 13:48:53.046733 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tw5l9"] Sep 29 13:48:53 crc kubenswrapper[4634]: I0929 13:48:53.707527 4634 generic.go:334] "Generic (PLEG): container finished" podID="1add5ad4-56e7-4d54-aaab-a74664b398ff" containerID="49196ebbaeb1e8e2ab64d9e5d0e89cded08b33fb37c22bc6442b0474bc3159ac" exitCode=0 Sep 29 13:48:53 crc kubenswrapper[4634]: I0929 13:48:53.707850 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qkgkn" event={"ID":"1add5ad4-56e7-4d54-aaab-a74664b398ff","Type":"ContainerDied","Data":"49196ebbaeb1e8e2ab64d9e5d0e89cded08b33fb37c22bc6442b0474bc3159ac"} Sep 29 13:48:53 crc kubenswrapper[4634]: I0929 13:48:53.707879 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qkgkn" event={"ID":"1add5ad4-56e7-4d54-aaab-a74664b398ff","Type":"ContainerStarted","Data":"9a65e215b9bd430f4bd82d9ab7bf91519e1463cbb78091a15d94ef97822e93d8"} Sep 29 13:48:53 crc kubenswrapper[4634]: I0929 13:48:53.709739 4634 generic.go:334] "Generic (PLEG): container finished" podID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerID="07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e" exitCode=0 Sep 29 13:48:53 crc kubenswrapper[4634]: I0929 13:48:53.709803 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tw5l9" event={"ID":"df7b8866-1de8-4b2e-a301-591d5abfd01a","Type":"ContainerDied","Data":"07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e"} Sep 29 13:48:53 crc kubenswrapper[4634]: I0929 13:48:53.709842 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tw5l9" event={"ID":"df7b8866-1de8-4b2e-a301-591d5abfd01a","Type":"ContainerStarted","Data":"ed2b72ec748580aede1b76dce92a9e5486f838cb1e712ccf44240e549268c9c4"} Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.543829 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s569s"] Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.545105 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.547816 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.557236 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s569s"] Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.675145 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m62mx\" (UniqueName: \"kubernetes.io/projected/1c8311b2-d781-4903-961e-33b0b839aeae-kube-api-access-m62mx\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.675479 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8311b2-d781-4903-961e-33b0b839aeae-utilities\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.675552 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8311b2-d781-4903-961e-33b0b839aeae-catalog-content\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.749610 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2nr7c"] Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.750522 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.752329 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.759090 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2nr7c"] Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.778852 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8311b2-d781-4903-961e-33b0b839aeae-utilities\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.778910 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8311b2-d781-4903-961e-33b0b839aeae-catalog-content\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.779174 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m62mx\" (UniqueName: \"kubernetes.io/projected/1c8311b2-d781-4903-961e-33b0b839aeae-kube-api-access-m62mx\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.780183 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1c8311b2-d781-4903-961e-33b0b839aeae-utilities\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.780402 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1c8311b2-d781-4903-961e-33b0b839aeae-catalog-content\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.801579 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m62mx\" (UniqueName: \"kubernetes.io/projected/1c8311b2-d781-4903-961e-33b0b839aeae-kube-api-access-m62mx\") pod \"redhat-operators-s569s\" (UID: \"1c8311b2-d781-4903-961e-33b0b839aeae\") " pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.880188 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-catalog-content\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.880242 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-utilities\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " 
pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.880281 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzlz2\" (UniqueName: \"kubernetes.io/projected/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-kube-api-access-wzlz2\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.901240 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.981305 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzlz2\" (UniqueName: \"kubernetes.io/projected/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-kube-api-access-wzlz2\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.981647 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-catalog-content\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.981690 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-utilities\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.982503 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-utilities\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:54 crc kubenswrapper[4634]: I0929 13:48:54.982892 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-catalog-content\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.002884 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzlz2\" (UniqueName: \"kubernetes.io/projected/b28ef713-38f8-4b94-a0fc-bc83b791a6d3-kube-api-access-wzlz2\") pod \"community-operators-2nr7c\" (UID: \"b28ef713-38f8-4b94-a0fc-bc83b791a6d3\") " pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.117975 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s569s"] Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.119935 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:48:55 crc kubenswrapper[4634]: W0929 13:48:55.124077 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c8311b2_d781_4903_961e_33b0b839aeae.slice/crio-8cad2d777fdc3a257ae81745434aa09dcc14ec37c3cd93543cdba24cef4cee4c WatchSource:0}: Error finding container 8cad2d777fdc3a257ae81745434aa09dcc14ec37c3cd93543cdba24cef4cee4c: Status 404 returned error can't find the container with id 8cad2d777fdc3a257ae81745434aa09dcc14ec37c3cd93543cdba24cef4cee4c Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.507076 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2nr7c"] Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.721447 4634 generic.go:334] "Generic (PLEG): container finished" podID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerID="e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975" exitCode=0 Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.721518 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tw5l9" event={"ID":"df7b8866-1de8-4b2e-a301-591d5abfd01a","Type":"ContainerDied","Data":"e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975"} Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.722969 4634 generic.go:334] "Generic (PLEG): container finished" podID="1c8311b2-d781-4903-961e-33b0b839aeae" containerID="9ca8ea49df570a425e92c044c118b70f81b9e673ef2d47566478dbe8e7b27bd1" exitCode=0 Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.723599 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s569s" event={"ID":"1c8311b2-d781-4903-961e-33b0b839aeae","Type":"ContainerDied","Data":"9ca8ea49df570a425e92c044c118b70f81b9e673ef2d47566478dbe8e7b27bd1"} Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.723616 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s569s" event={"ID":"1c8311b2-d781-4903-961e-33b0b839aeae","Type":"ContainerStarted","Data":"8cad2d777fdc3a257ae81745434aa09dcc14ec37c3cd93543cdba24cef4cee4c"} Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.729641 4634 generic.go:334] "Generic (PLEG): container finished" podID="b28ef713-38f8-4b94-a0fc-bc83b791a6d3" containerID="1ae7d9ad0623aca4973ca1259690659384cff9f64b7d40e832a0e1de7e85f2d1" exitCode=0 Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.729980 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nr7c" event={"ID":"b28ef713-38f8-4b94-a0fc-bc83b791a6d3","Type":"ContainerDied","Data":"1ae7d9ad0623aca4973ca1259690659384cff9f64b7d40e832a0e1de7e85f2d1"} Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.730011 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nr7c" event={"ID":"b28ef713-38f8-4b94-a0fc-bc83b791a6d3","Type":"ContainerStarted","Data":"0a1219dbae5bdd54dce76420b2affbf95edd4c7e163e997a503362a516c60fe6"} Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.743070 4634 generic.go:334] "Generic (PLEG): container finished" podID="1add5ad4-56e7-4d54-aaab-a74664b398ff" containerID="8b27fd42a2764fe465ff6aeb09ec5190e081b36bf94b434e519cb7206b9ec976" exitCode=0 Sep 29 13:48:55 crc kubenswrapper[4634]: I0929 13:48:55.743143 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-qkgkn" event={"ID":"1add5ad4-56e7-4d54-aaab-a74664b398ff","Type":"ContainerDied","Data":"8b27fd42a2764fe465ff6aeb09ec5190e081b36bf94b434e519cb7206b9ec976"} Sep 29 13:48:56 crc kubenswrapper[4634]: I0929 13:48:56.749112 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qkgkn" event={"ID":"1add5ad4-56e7-4d54-aaab-a74664b398ff","Type":"ContainerStarted","Data":"50719a42e16dbbf9505ca5f6229faad3c5d7e1452a367c98d0d74d6f50753d87"} Sep 29 13:48:56 crc kubenswrapper[4634]: I0929 13:48:56.754019 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tw5l9" event={"ID":"df7b8866-1de8-4b2e-a301-591d5abfd01a","Type":"ContainerStarted","Data":"0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2"} Sep 29 13:48:56 crc kubenswrapper[4634]: I0929 13:48:56.755609 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s569s" event={"ID":"1c8311b2-d781-4903-961e-33b0b839aeae","Type":"ContainerStarted","Data":"2eb525575c2324975ef22c2c7b0387ad9c9c0ebcf50131af4f216057a52400f4"} Sep 29 13:48:56 crc kubenswrapper[4634]: I0929 13:48:56.773855 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qkgkn" podStartSLOduration=2.119606805 podStartE2EDuration="4.77383928s" podCreationTimestamp="2025-09-29 13:48:52 +0000 UTC" firstStartedPulling="2025-09-29 13:48:53.70938148 +0000 UTC m=+264.278109229" lastFinishedPulling="2025-09-29 13:48:56.363613955 +0000 UTC m=+266.932341704" observedRunningTime="2025-09-29 13:48:56.771411518 +0000 UTC m=+267.340139267" watchObservedRunningTime="2025-09-29 13:48:56.77383928 +0000 UTC m=+267.342567029" Sep 29 13:48:56 crc kubenswrapper[4634]: I0929 13:48:56.815585 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tw5l9" podStartSLOduration=2.409762407 podStartE2EDuration="4.815570116s" podCreationTimestamp="2025-09-29 13:48:52 +0000 UTC" firstStartedPulling="2025-09-29 13:48:53.713193694 +0000 UTC m=+264.281921443" lastFinishedPulling="2025-09-29 13:48:56.119001403 +0000 UTC m=+266.687729152" observedRunningTime="2025-09-29 13:48:56.812899276 +0000 UTC m=+267.381627025" watchObservedRunningTime="2025-09-29 13:48:56.815570116 +0000 UTC m=+267.384297865" Sep 29 13:48:57 crc kubenswrapper[4634]: I0929 13:48:57.762959 4634 generic.go:334] "Generic (PLEG): container finished" podID="1c8311b2-d781-4903-961e-33b0b839aeae" containerID="2eb525575c2324975ef22c2c7b0387ad9c9c0ebcf50131af4f216057a52400f4" exitCode=0 Sep 29 13:48:57 crc kubenswrapper[4634]: I0929 13:48:57.763025 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s569s" event={"ID":"1c8311b2-d781-4903-961e-33b0b839aeae","Type":"ContainerDied","Data":"2eb525575c2324975ef22c2c7b0387ad9c9c0ebcf50131af4f216057a52400f4"} Sep 29 13:48:57 crc kubenswrapper[4634]: I0929 13:48:57.767242 4634 generic.go:334] "Generic (PLEG): container finished" podID="b28ef713-38f8-4b94-a0fc-bc83b791a6d3" containerID="d2ee21aaa4feb3b4832d8c66554e954d026225c54b59dbbf5e3e779d9d9521f5" exitCode=0 Sep 29 13:48:57 crc kubenswrapper[4634]: I0929 13:48:57.768032 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nr7c" 
event={"ID":"b28ef713-38f8-4b94-a0fc-bc83b791a6d3","Type":"ContainerDied","Data":"d2ee21aaa4feb3b4832d8c66554e954d026225c54b59dbbf5e3e779d9d9521f5"} Sep 29 13:48:58 crc kubenswrapper[4634]: I0929 13:48:58.774022 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s569s" event={"ID":"1c8311b2-d781-4903-961e-33b0b839aeae","Type":"ContainerStarted","Data":"a8a52f5d8be99f8e9427077a3f42b51b453a3218a3a08b833fa978f2cb0548d5"} Sep 29 13:48:58 crc kubenswrapper[4634]: I0929 13:48:58.776624 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2nr7c" event={"ID":"b28ef713-38f8-4b94-a0fc-bc83b791a6d3","Type":"ContainerStarted","Data":"1f9a3624bd28f5a4500fa58effb93155365c2d47c8bb90e2780b81c1eaf3dc30"} Sep 29 13:48:58 crc kubenswrapper[4634]: I0929 13:48:58.791022 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-s569s" podStartSLOduration=2.116522138 podStartE2EDuration="4.790999437s" podCreationTimestamp="2025-09-29 13:48:54 +0000 UTC" firstStartedPulling="2025-09-29 13:48:55.724763624 +0000 UTC m=+266.293491373" lastFinishedPulling="2025-09-29 13:48:58.399240923 +0000 UTC m=+268.967968672" observedRunningTime="2025-09-29 13:48:58.787054799 +0000 UTC m=+269.355782568" watchObservedRunningTime="2025-09-29 13:48:58.790999437 +0000 UTC m=+269.359727186" Sep 29 13:48:58 crc kubenswrapper[4634]: I0929 13:48:58.803402 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2nr7c" podStartSLOduration=2.318136257 podStartE2EDuration="4.803387917s" podCreationTimestamp="2025-09-29 13:48:54 +0000 UTC" firstStartedPulling="2025-09-29 13:48:55.731863645 +0000 UTC m=+266.300591394" lastFinishedPulling="2025-09-29 13:48:58.217115285 +0000 UTC m=+268.785843054" observedRunningTime="2025-09-29 13:48:58.800824441 +0000 UTC m=+269.369552190" watchObservedRunningTime="2025-09-29 13:48:58.803387917 +0000 UTC m=+269.372115666" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.466361 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.466672 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.506333 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.665917 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.666195 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.709366 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.827585 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:49:02 crc kubenswrapper[4634]: I0929 13:49:02.828791 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-qkgkn" Sep 29 13:49:04 crc kubenswrapper[4634]: I0929 13:49:04.901870 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:49:04 crc kubenswrapper[4634]: I0929 13:49:04.901931 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:49:04 crc kubenswrapper[4634]: I0929 13:49:04.942390 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:49:05 crc kubenswrapper[4634]: I0929 13:49:05.119942 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:49:05 crc kubenswrapper[4634]: I0929 13:49:05.120202 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:49:05 crc kubenswrapper[4634]: I0929 13:49:05.185998 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:49:05 crc kubenswrapper[4634]: I0929 13:49:05.851839 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2nr7c" Sep 29 13:49:05 crc kubenswrapper[4634]: I0929 13:49:05.867102 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s569s" Sep 29 13:50:14 crc kubenswrapper[4634]: I0929 13:50:14.396475 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:50:14 crc kubenswrapper[4634]: I0929 13:50:14.396989 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:50:44 crc kubenswrapper[4634]: I0929 13:50:44.396149 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:50:44 crc kubenswrapper[4634]: I0929 13:50:44.396761 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.395605 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.396249 4634 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.396302 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.396841 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9dabd1bf385841cebfd86dae33dc8ce33877bc83fd06ad816398ab585db5694d"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.396890 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://9dabd1bf385841cebfd86dae33dc8ce33877bc83fd06ad816398ab585db5694d" gracePeriod=600 Sep 29 13:51:14 crc kubenswrapper[4634]: E0929 13:51:14.472674 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9173d45a_da12_4090_92c3_65ad4dcec715.slice/crio-9dabd1bf385841cebfd86dae33dc8ce33877bc83fd06ad816398ab585db5694d.scope\": RecentStats: unable to find data in memory cache]" Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.612414 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="9dabd1bf385841cebfd86dae33dc8ce33877bc83fd06ad816398ab585db5694d" exitCode=0 Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.612465 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"9dabd1bf385841cebfd86dae33dc8ce33877bc83fd06ad816398ab585db5694d"} Sep 29 13:51:14 crc kubenswrapper[4634]: I0929 13:51:14.612501 4634 scope.go:117] "RemoveContainer" containerID="b3a88153137eaaa134860010f973d02c9c32dca671af025b328ccf5684979bfd" Sep 29 13:51:15 crc kubenswrapper[4634]: I0929 13:51:15.619742 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"612b3e56f8cc9f853bca3919763d42da7657a071824b09305d83106a338a3a6e"} Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.496352 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7dtkr"] Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.497478 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.513599 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7dtkr"] Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616469 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/91dce15c-3d38-495e-89d1-1f10306c92f0-registry-certificates\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616520 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616544 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-registry-tls\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616562 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/91dce15c-3d38-495e-89d1-1f10306c92f0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616713 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91dce15c-3d38-495e-89d1-1f10306c92f0-trusted-ca\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616837 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-bound-sa-token\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616897 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/91dce15c-3d38-495e-89d1-1f10306c92f0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.616971 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx6nc\" (UniqueName: 
\"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-kube-api-access-sx6nc\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.678044 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718213 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-bound-sa-token\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718270 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/91dce15c-3d38-495e-89d1-1f10306c92f0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718296 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx6nc\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-kube-api-access-sx6nc\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718323 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/91dce15c-3d38-495e-89d1-1f10306c92f0-registry-certificates\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718348 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-registry-tls\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718367 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/91dce15c-3d38-495e-89d1-1f10306c92f0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718391 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91dce15c-3d38-495e-89d1-1f10306c92f0-trusted-ca\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.718746 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/91dce15c-3d38-495e-89d1-1f10306c92f0-ca-trust-extracted\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.719503 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/91dce15c-3d38-495e-89d1-1f10306c92f0-trusted-ca\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.720659 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/91dce15c-3d38-495e-89d1-1f10306c92f0-registry-certificates\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.723956 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-registry-tls\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.726525 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/91dce15c-3d38-495e-89d1-1f10306c92f0-installation-pull-secrets\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.734791 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx6nc\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-kube-api-access-sx6nc\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.735324 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/91dce15c-3d38-495e-89d1-1f10306c92f0-bound-sa-token\") pod \"image-registry-66df7c8f76-7dtkr\" (UID: \"91dce15c-3d38-495e-89d1-1f10306c92f0\") " pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:45 crc kubenswrapper[4634]: I0929 13:51:45.816211 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:46 crc kubenswrapper[4634]: I0929 13:51:46.033582 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-7dtkr"] Sep 29 13:51:46 crc kubenswrapper[4634]: I0929 13:51:46.820616 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" event={"ID":"91dce15c-3d38-495e-89d1-1f10306c92f0","Type":"ContainerStarted","Data":"4dbf290c49fabe8407de5b720d034287f7584effdc520bf76de38d6e0fae8147"} Sep 29 13:51:46 crc kubenswrapper[4634]: I0929 13:51:46.820702 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" event={"ID":"91dce15c-3d38-495e-89d1-1f10306c92f0","Type":"ContainerStarted","Data":"cd10f36a3bead3dd4d278e1f332e610f60ae84ef6dff1d103a88ea502cc16e08"} Sep 29 13:51:46 crc kubenswrapper[4634]: I0929 13:51:46.822599 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:51:46 crc kubenswrapper[4634]: I0929 13:51:46.853807 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" podStartSLOduration=1.853784184 podStartE2EDuration="1.853784184s" podCreationTimestamp="2025-09-29 13:51:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:51:46.851262312 +0000 UTC m=+437.419990151" watchObservedRunningTime="2025-09-29 13:51:46.853784184 +0000 UTC m=+437.422511973" Sep 29 13:52:05 crc kubenswrapper[4634]: I0929 13:52:05.825664 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-7dtkr" Sep 29 13:52:05 crc kubenswrapper[4634]: I0929 13:52:05.898130 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwzp5"] Sep 29 13:52:30 crc kubenswrapper[4634]: I0929 13:52:30.953555 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" podUID="a4bc890e-57a2-4633-88bf-cb66c90293e8" containerName="registry" containerID="cri-o://07211b46ca588d7805b8f3914ab9c79152702771e4a3ab52fffd492ec7209f8a" gracePeriod=30 Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.128889 4634 generic.go:334] "Generic (PLEG): container finished" podID="a4bc890e-57a2-4633-88bf-cb66c90293e8" containerID="07211b46ca588d7805b8f3914ab9c79152702771e4a3ab52fffd492ec7209f8a" exitCode=0 Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.128959 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" event={"ID":"a4bc890e-57a2-4633-88bf-cb66c90293e8","Type":"ContainerDied","Data":"07211b46ca588d7805b8f3914ab9c79152702771e4a3ab52fffd492ec7209f8a"} Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.354011 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.486882 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4bc890e-57a2-4633-88bf-cb66c90293e8-ca-trust-extracted\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.487007 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwds8\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-kube-api-access-qwds8\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.487073 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-certificates\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.487172 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-trusted-ca\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.487220 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4bc890e-57a2-4633-88bf-cb66c90293e8-installation-pull-secrets\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.487276 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-bound-sa-token\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.487502 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.487604 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-tls\") pod \"a4bc890e-57a2-4633-88bf-cb66c90293e8\" (UID: \"a4bc890e-57a2-4633-88bf-cb66c90293e8\") " Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.488909 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.490044 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.495395 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4bc890e-57a2-4633-88bf-cb66c90293e8-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.495921 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-kube-api-access-qwds8" (OuterVolumeSpecName: "kube-api-access-qwds8") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "kube-api-access-qwds8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.505540 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.509422 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.513411 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.515028 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4bc890e-57a2-4633-88bf-cb66c90293e8-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "a4bc890e-57a2-4633-88bf-cb66c90293e8" (UID: "a4bc890e-57a2-4633-88bf-cb66c90293e8"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.589298 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwds8\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-kube-api-access-qwds8\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.589350 4634 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.589371 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a4bc890e-57a2-4633-88bf-cb66c90293e8-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.589393 4634 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a4bc890e-57a2-4633-88bf-cb66c90293e8-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.589412 4634 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.589430 4634 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a4bc890e-57a2-4633-88bf-cb66c90293e8-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:31 crc kubenswrapper[4634]: I0929 13:52:31.589447 4634 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a4bc890e-57a2-4633-88bf-cb66c90293e8-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 13:52:32 crc kubenswrapper[4634]: I0929 13:52:32.150851 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" event={"ID":"a4bc890e-57a2-4633-88bf-cb66c90293e8","Type":"ContainerDied","Data":"4183ec6db0b153f2ace68500beba4226bff72cc29dd4ac892c5f07c981682cf8"} Sep 29 13:52:32 crc kubenswrapper[4634]: I0929 13:52:32.150965 4634 scope.go:117] "RemoveContainer" containerID="07211b46ca588d7805b8f3914ab9c79152702771e4a3ab52fffd492ec7209f8a" Sep 29 13:52:32 crc kubenswrapper[4634]: I0929 13:52:32.150992 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xwzp5" Sep 29 13:52:32 crc kubenswrapper[4634]: I0929 13:52:32.205396 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwzp5"] Sep 29 13:52:32 crc kubenswrapper[4634]: I0929 13:52:32.213746 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xwzp5"] Sep 29 13:52:34 crc kubenswrapper[4634]: I0929 13:52:34.123728 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4bc890e-57a2-4633-88bf-cb66c90293e8" path="/var/lib/kubelet/pods/a4bc890e-57a2-4633-88bf-cb66c90293e8/volumes" Sep 29 13:53:14 crc kubenswrapper[4634]: I0929 13:53:14.396884 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:53:14 crc kubenswrapper[4634]: I0929 13:53:14.398185 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:53:44 crc kubenswrapper[4634]: I0929 13:53:44.396234 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:53:44 crc kubenswrapper[4634]: I0929 13:53:44.396835 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.396015 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.397075 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.397264 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.398149 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"612b3e56f8cc9f853bca3919763d42da7657a071824b09305d83106a338a3a6e"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.398263 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://612b3e56f8cc9f853bca3919763d42da7657a071824b09305d83106a338a3a6e" gracePeriod=600 Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.815450 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="612b3e56f8cc9f853bca3919763d42da7657a071824b09305d83106a338a3a6e" exitCode=0 Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.815599 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"612b3e56f8cc9f853bca3919763d42da7657a071824b09305d83106a338a3a6e"} Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.816036 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"dbcc2440180d99fb45da22933b773ac34313e312284872b76ccc1d05c2cec895"} Sep 29 13:54:14 crc kubenswrapper[4634]: I0929 13:54:14.816100 4634 scope.go:117] "RemoveContainer" containerID="9dabd1bf385841cebfd86dae33dc8ce33877bc83fd06ad816398ab585db5694d" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.402258 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-fssvx"] Sep 29 13:55:46 crc kubenswrapper[4634]: E0929 13:55:46.402936 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4bc890e-57a2-4633-88bf-cb66c90293e8" containerName="registry" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.402948 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4bc890e-57a2-4633-88bf-cb66c90293e8" containerName="registry" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.403043 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4bc890e-57a2-4633-88bf-cb66c90293e8" containerName="registry" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.403462 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.406636 4634 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-jdwg4" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.407320 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.408060 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.426793 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-fssvx"] Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.428301 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7xsf\" (UniqueName: \"kubernetes.io/projected/f7a64d90-3df0-4013-9334-10cb44b056d0-kube-api-access-l7xsf\") pod \"cert-manager-cainjector-7f985d654d-fssvx\" (UID: \"f7a64d90-3df0-4013-9334-10cb44b056d0\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.435530 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-whcmj"] Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.436286 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.437844 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-2vbph"] Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.438494 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-2vbph" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.440840 4634 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-zrnv6" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.441241 4634 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-t5fgv" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.466937 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-2vbph"] Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.469930 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-whcmj"] Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.529512 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7xsf\" (UniqueName: \"kubernetes.io/projected/f7a64d90-3df0-4013-9334-10cb44b056d0-kube-api-access-l7xsf\") pod \"cert-manager-cainjector-7f985d654d-fssvx\" (UID: \"f7a64d90-3df0-4013-9334-10cb44b056d0\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.529581 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx527\" (UniqueName: \"kubernetes.io/projected/952ce650-52ed-4dcb-88bb-d9f9ce5a69ed-kube-api-access-nx527\") pod \"cert-manager-webhook-5655c58dd6-whcmj\" (UID: \"952ce650-52ed-4dcb-88bb-d9f9ce5a69ed\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.529608 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn59d\" (UniqueName: \"kubernetes.io/projected/d2d8a3b4-5469-4e43-853a-68ea314698d5-kube-api-access-kn59d\") pod \"cert-manager-5b446d88c5-2vbph\" (UID: \"d2d8a3b4-5469-4e43-853a-68ea314698d5\") " pod="cert-manager/cert-manager-5b446d88c5-2vbph" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.547694 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7xsf\" (UniqueName: \"kubernetes.io/projected/f7a64d90-3df0-4013-9334-10cb44b056d0-kube-api-access-l7xsf\") pod \"cert-manager-cainjector-7f985d654d-fssvx\" (UID: \"f7a64d90-3df0-4013-9334-10cb44b056d0\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.631063 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx527\" (UniqueName: \"kubernetes.io/projected/952ce650-52ed-4dcb-88bb-d9f9ce5a69ed-kube-api-access-nx527\") pod \"cert-manager-webhook-5655c58dd6-whcmj\" (UID: \"952ce650-52ed-4dcb-88bb-d9f9ce5a69ed\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.631151 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn59d\" (UniqueName: \"kubernetes.io/projected/d2d8a3b4-5469-4e43-853a-68ea314698d5-kube-api-access-kn59d\") pod \"cert-manager-5b446d88c5-2vbph\" (UID: \"d2d8a3b4-5469-4e43-853a-68ea314698d5\") " pod="cert-manager/cert-manager-5b446d88c5-2vbph" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.646139 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx527\" (UniqueName: 
\"kubernetes.io/projected/952ce650-52ed-4dcb-88bb-d9f9ce5a69ed-kube-api-access-nx527\") pod \"cert-manager-webhook-5655c58dd6-whcmj\" (UID: \"952ce650-52ed-4dcb-88bb-d9f9ce5a69ed\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.646502 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn59d\" (UniqueName: \"kubernetes.io/projected/d2d8a3b4-5469-4e43-853a-68ea314698d5-kube-api-access-kn59d\") pod \"cert-manager-5b446d88c5-2vbph\" (UID: \"d2d8a3b4-5469-4e43-853a-68ea314698d5\") " pod="cert-manager/cert-manager-5b446d88c5-2vbph" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.719965 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.748878 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" Sep 29 13:55:46 crc kubenswrapper[4634]: I0929 13:55:46.769958 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-2vbph" Sep 29 13:55:47 crc kubenswrapper[4634]: I0929 13:55:47.017983 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-whcmj"] Sep 29 13:55:47 crc kubenswrapper[4634]: W0929 13:55:47.029277 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod952ce650_52ed_4dcb_88bb_d9f9ce5a69ed.slice/crio-124952c2f578e2018b0e5a943c6ae871d71c79fb59f9b63eca0cd5c3a0e36407 WatchSource:0}: Error finding container 124952c2f578e2018b0e5a943c6ae871d71c79fb59f9b63eca0cd5c3a0e36407: Status 404 returned error can't find the container with id 124952c2f578e2018b0e5a943c6ae871d71c79fb59f9b63eca0cd5c3a0e36407 Sep 29 13:55:47 crc kubenswrapper[4634]: I0929 13:55:47.031624 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 13:55:47 crc kubenswrapper[4634]: I0929 13:55:47.051820 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-2vbph"] Sep 29 13:55:47 crc kubenswrapper[4634]: W0929 13:55:47.060649 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2d8a3b4_5469_4e43_853a_68ea314698d5.slice/crio-45328ac1f50a7e36404cd4be93c705bacdddc27c5ff9565ba9d536a1ea995f9a WatchSource:0}: Error finding container 45328ac1f50a7e36404cd4be93c705bacdddc27c5ff9565ba9d536a1ea995f9a: Status 404 returned error can't find the container with id 45328ac1f50a7e36404cd4be93c705bacdddc27c5ff9565ba9d536a1ea995f9a Sep 29 13:55:47 crc kubenswrapper[4634]: I0929 13:55:47.163739 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-fssvx"] Sep 29 13:55:47 crc kubenswrapper[4634]: W0929 13:55:47.172314 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7a64d90_3df0_4013_9334_10cb44b056d0.slice/crio-c4e2d63d6d82829ef8c9114515c15e22c2901b8bcb46b4d3bc5ddb3abe9c66a7 WatchSource:0}: Error finding container c4e2d63d6d82829ef8c9114515c15e22c2901b8bcb46b4d3bc5ddb3abe9c66a7: Status 404 returned error can't find the container with id c4e2d63d6d82829ef8c9114515c15e22c2901b8bcb46b4d3bc5ddb3abe9c66a7 Sep 29 13:55:47 crc 
kubenswrapper[4634]: I0929 13:55:47.401800 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-2vbph" event={"ID":"d2d8a3b4-5469-4e43-853a-68ea314698d5","Type":"ContainerStarted","Data":"45328ac1f50a7e36404cd4be93c705bacdddc27c5ff9565ba9d536a1ea995f9a"} Sep 29 13:55:47 crc kubenswrapper[4634]: I0929 13:55:47.402701 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" event={"ID":"952ce650-52ed-4dcb-88bb-d9f9ce5a69ed","Type":"ContainerStarted","Data":"124952c2f578e2018b0e5a943c6ae871d71c79fb59f9b63eca0cd5c3a0e36407"} Sep 29 13:55:47 crc kubenswrapper[4634]: I0929 13:55:47.403654 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" event={"ID":"f7a64d90-3df0-4013-9334-10cb44b056d0","Type":"ContainerStarted","Data":"c4e2d63d6d82829ef8c9114515c15e22c2901b8bcb46b4d3bc5ddb3abe9c66a7"} Sep 29 13:55:50 crc kubenswrapper[4634]: I0929 13:55:50.432214 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-2vbph" event={"ID":"d2d8a3b4-5469-4e43-853a-68ea314698d5","Type":"ContainerStarted","Data":"bcf02e953c0b1bc202909f810995af90ddff2ebd03434830d4e59eb8f5f5d532"} Sep 29 13:55:50 crc kubenswrapper[4634]: I0929 13:55:50.440032 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" event={"ID":"952ce650-52ed-4dcb-88bb-d9f9ce5a69ed","Type":"ContainerStarted","Data":"0b27d5c893caf48b8cac27c448d7e9076c7641970b2804774c283657f3004442"} Sep 29 13:55:50 crc kubenswrapper[4634]: I0929 13:55:50.440168 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" Sep 29 13:55:50 crc kubenswrapper[4634]: I0929 13:55:50.441935 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" event={"ID":"f7a64d90-3df0-4013-9334-10cb44b056d0","Type":"ContainerStarted","Data":"8de6482ef57918464aa375a011161cc5487daeaae9fb39c3c0070c38f60ca98d"} Sep 29 13:55:50 crc kubenswrapper[4634]: I0929 13:55:50.455077 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-2vbph" podStartSLOduration=1.347408493 podStartE2EDuration="4.455050799s" podCreationTimestamp="2025-09-29 13:55:46 +0000 UTC" firstStartedPulling="2025-09-29 13:55:47.063109832 +0000 UTC m=+677.631837581" lastFinishedPulling="2025-09-29 13:55:50.170752128 +0000 UTC m=+680.739479887" observedRunningTime="2025-09-29 13:55:50.449108829 +0000 UTC m=+681.017836578" watchObservedRunningTime="2025-09-29 13:55:50.455050799 +0000 UTC m=+681.023778568" Sep 29 13:55:50 crc kubenswrapper[4634]: I0929 13:55:50.486681 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-fssvx" podStartSLOduration=1.479174536 podStartE2EDuration="4.486662867s" podCreationTimestamp="2025-09-29 13:55:46 +0000 UTC" firstStartedPulling="2025-09-29 13:55:47.17449145 +0000 UTC m=+677.743219189" lastFinishedPulling="2025-09-29 13:55:50.181979761 +0000 UTC m=+680.750707520" observedRunningTime="2025-09-29 13:55:50.47248666 +0000 UTC m=+681.041214409" watchObservedRunningTime="2025-09-29 13:55:50.486662867 +0000 UTC m=+681.055390616" Sep 29 13:55:50 crc kubenswrapper[4634]: I0929 13:55:50.487807 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" podStartSLOduration=1.343916384 podStartE2EDuration="4.48780204s" podCreationTimestamp="2025-09-29 13:55:46 +0000 UTC" firstStartedPulling="2025-09-29 13:55:47.03132588 +0000 UTC m=+677.600053629" lastFinishedPulling="2025-09-29 13:55:50.175211536 +0000 UTC m=+680.743939285" observedRunningTime="2025-09-29 13:55:50.486047609 +0000 UTC m=+681.054775358" watchObservedRunningTime="2025-09-29 13:55:50.48780204 +0000 UTC m=+681.056529789" Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.752896 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-whcmj" Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.894959 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jqlh6"] Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.895443 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-controller" containerID="cri-o://f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" gracePeriod=30 Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.895511 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="sbdb" containerID="cri-o://7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" gracePeriod=30 Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.895564 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-node" containerID="cri-o://31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" gracePeriod=30 Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.895630 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="northd" containerID="cri-o://03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" gracePeriod=30 Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.895679 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" gracePeriod=30 Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.895471 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="nbdb" containerID="cri-o://2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" gracePeriod=30 Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.895582 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-acl-logging" containerID="cri-o://289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" gracePeriod=30 Sep 29 13:55:56 crc kubenswrapper[4634]: I0929 13:55:56.942562 4634 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" containerID="cri-o://7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" gracePeriod=30 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.163948 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/3.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.166795 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovn-acl-logging/0.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.167409 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovn-controller/0.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.167870 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232015 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-tvbgj"] Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232218 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kubecfg-setup" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232230 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kubecfg-setup" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232243 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232248 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232257 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232263 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232270 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-node" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232276 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-node" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232284 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="sbdb" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232290 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="sbdb" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232298 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232304 4634 
state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232313 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232319 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232328 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-acl-logging" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232333 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-acl-logging" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232339 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232346 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232354 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="nbdb" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232360 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="nbdb" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232370 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="northd" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232376 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="northd" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232460 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232469 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232475 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232484 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="sbdb" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232492 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232499 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="northd" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232508 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-controller" Sep 29 13:55:57 crc 
kubenswrapper[4634]: I0929 13:55:57.232513 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="kube-rbac-proxy-node" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232523 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="nbdb" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232530 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovn-acl-logging" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232609 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232616 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.232626 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232633 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232737 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.232747 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerName="ovnkube-controller" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.234209 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.294925 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-var-lib-openvswitch\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.294963 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-ovn-kubernetes\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.294981 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295010 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k24j2\" (UniqueName: \"kubernetes.io/projected/65f06677-4cbf-41c9-a0da-02f49710c11c-kube-api-access-k24j2\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295025 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-openvswitch\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295043 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65f06677-4cbf-41c9-a0da-02f49710c11c-ovn-node-metrics-cert\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295070 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-systemd\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295099 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-log-socket\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295131 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-env-overrides\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295150 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-config\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295170 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-etc-openvswitch\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295192 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-slash\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295210 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-netns\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295227 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-netd\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295246 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-script-lib\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295267 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-node-log\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295285 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-ovn\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295299 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-bin\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295317 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-systemd-units\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295331 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-kubelet\") pod \"65f06677-4cbf-41c9-a0da-02f49710c11c\" (UID: \"65f06677-4cbf-41c9-a0da-02f49710c11c\") " Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295856 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.295961 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.296032 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.296521 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.296553 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297076 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297125 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-node-log" (OuterVolumeSpecName: "node-log") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297145 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297164 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297183 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297202 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297409 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297471 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-slash" (OuterVolumeSpecName: "host-slash") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297472 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297504 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-log-socket" (OuterVolumeSpecName: "log-socket") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.297648 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.298137 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.302822 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65f06677-4cbf-41c9-a0da-02f49710c11c-kube-api-access-k24j2" (OuterVolumeSpecName: "kube-api-access-k24j2") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "kube-api-access-k24j2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.305351 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f06677-4cbf-41c9-a0da-02f49710c11c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.311412 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "65f06677-4cbf-41c9-a0da-02f49710c11c" (UID: "65f06677-4cbf-41c9-a0da-02f49710c11c"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.396900 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.396958 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-etc-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.396993 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-log-socket\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397016 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-run-netns\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397035 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-node-log\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397074 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-env-overrides\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397116 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-cni-netd\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397140 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bhq6\" (UniqueName: \"kubernetes.io/projected/00bd122b-e43c-45bf-91d3-d7d363892ffb-kube-api-access-9bhq6\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397164 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-run-ovn-kubernetes\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397187 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-kubelet\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397212 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-slash\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397233 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-systemd\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397256 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-ovn\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397287 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-cni-bin\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397309 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-var-lib-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397329 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovnkube-config\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397348 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovn-node-metrics-cert\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397483 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-systemd-units\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397535 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovnkube-script-lib\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397560 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397607 4634 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397622 4634 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397635 4634 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397647 4634 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397659 4634 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397671 4634 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397682 4634 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/65f06677-4cbf-41c9-a0da-02f49710c11c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397692 4634 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397702 4634 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 
13:55:57.397712 4634 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397723 4634 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397734 4634 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397746 4634 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397757 4634 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397768 4634 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397782 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k24j2\" (UniqueName: \"kubernetes.io/projected/65f06677-4cbf-41c9-a0da-02f49710c11c-kube-api-access-k24j2\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397795 4634 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397806 4634 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65f06677-4cbf-41c9-a0da-02f49710c11c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397817 4634 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.397829 4634 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/65f06677-4cbf-41c9-a0da-02f49710c11c-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.488678 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/2.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.489288 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/1.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.489398 4634 generic.go:334] "Generic (PLEG): container finished" 
podID="77b5113e-50cd-417c-8991-cae5cd823f3f" containerID="639b3b2ff647f9ebecb99109c34868c82c3aeda6e6eb0d1a1abf777bb5bb4643" exitCode=2 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.489471 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerDied","Data":"639b3b2ff647f9ebecb99109c34868c82c3aeda6e6eb0d1a1abf777bb5bb4643"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.489567 4634 scope.go:117] "RemoveContainer" containerID="ba4878d2be9128e1b0c6f396c8e485ac2e4a37d7e122ee16669814229e22bda3" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.490317 4634 scope.go:117] "RemoveContainer" containerID="639b3b2ff647f9ebecb99109c34868c82c3aeda6e6eb0d1a1abf777bb5bb4643" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.490704 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wtnjd_openshift-multus(77b5113e-50cd-417c-8991-cae5cd823f3f)\"" pod="openshift-multus/multus-wtnjd" podUID="77b5113e-50cd-417c-8991-cae5cd823f3f" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.493723 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovnkube-controller/3.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.497523 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovn-acl-logging/0.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498007 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-jqlh6_65f06677-4cbf-41c9-a0da-02f49710c11c/ovn-controller/0.log" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498471 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" exitCode=0 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498493 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" exitCode=0 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498503 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" exitCode=0 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498511 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" exitCode=0 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498518 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" exitCode=0 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498524 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" exitCode=0 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498530 4634 generic.go:334] "Generic (PLEG): container 
finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" exitCode=143 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498537 4634 generic.go:334] "Generic (PLEG): container finished" podID="65f06677-4cbf-41c9-a0da-02f49710c11c" containerID="f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" exitCode=143 Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498555 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498579 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498593 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498603 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498612 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498622 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498633 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498641 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498647 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498653 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498659 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} 
Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498666 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498673 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498675 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498680 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498836 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498845 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498857 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498868 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498874 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498879 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498884 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498889 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498893 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498898 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 
13:55:57.498903 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498907 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498912 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498919 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498927 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498933 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498938 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498944 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498948 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498954 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498959 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498964 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498968 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498974 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 
13:55:57.498981 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jqlh6" event={"ID":"65f06677-4cbf-41c9-a0da-02f49710c11c","Type":"ContainerDied","Data":"5d52462b034d716eab641671b1df7b79977796f8dd21f17843728c22f6ff035f"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498988 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498994 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.498999 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499004 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499010 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499015 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499020 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499024 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499031 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499035 4634 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499539 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-kubelet\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499561 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-slash\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc 
kubenswrapper[4634]: I0929 13:55:57.499580 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-systemd\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499596 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-ovn\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499622 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-cni-bin\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499639 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-var-lib-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499655 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovnkube-config\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499672 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovn-node-metrics-cert\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499692 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-systemd-units\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499709 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovnkube-script-lib\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499727 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499744 4634 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499759 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-etc-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499779 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-log-socket\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499795 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-run-netns\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499808 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-node-log\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499857 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-env-overrides\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499874 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-cni-netd\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499888 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bhq6\" (UniqueName: \"kubernetes.io/projected/00bd122b-e43c-45bf-91d3-d7d363892ffb-kube-api-access-9bhq6\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499904 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-run-ovn-kubernetes\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499961 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-run-ovn-kubernetes\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.499990 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-kubelet\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.500008 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-slash\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.500027 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-systemd\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.500046 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-ovn\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.500065 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-cni-bin\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.500102 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-var-lib-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.500776 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovnkube-config\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.501283 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-run-netns\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.501370 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-systemd-units\") pod \"ovnkube-node-tvbgj\" (UID: 
\"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.501978 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-node-log\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.502117 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-run-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.502130 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.502229 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-host-cni-netd\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.502595 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-etc-openvswitch\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.502621 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/00bd122b-e43c-45bf-91d3-d7d363892ffb-log-socket\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.503188 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-env-overrides\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.504325 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovnkube-script-lib\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.505037 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/00bd122b-e43c-45bf-91d3-d7d363892ffb-ovn-node-metrics-cert\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc 
kubenswrapper[4634]: I0929 13:55:57.527234 4634 scope.go:117] "RemoveContainer" containerID="7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.533422 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bhq6\" (UniqueName: \"kubernetes.io/projected/00bd122b-e43c-45bf-91d3-d7d363892ffb-kube-api-access-9bhq6\") pod \"ovnkube-node-tvbgj\" (UID: \"00bd122b-e43c-45bf-91d3-d7d363892ffb\") " pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.548890 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.560328 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.563341 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jqlh6"] Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.569590 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jqlh6"] Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.578521 4634 scope.go:117] "RemoveContainer" containerID="7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.594832 4634 scope.go:117] "RemoveContainer" containerID="2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.624363 4634 scope.go:117] "RemoveContainer" containerID="03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.639642 4634 scope.go:117] "RemoveContainer" containerID="4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.661671 4634 scope.go:117] "RemoveContainer" containerID="31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.692407 4634 scope.go:117] "RemoveContainer" containerID="289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.706161 4634 scope.go:117] "RemoveContainer" containerID="f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.725184 4634 scope.go:117] "RemoveContainer" containerID="882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.749157 4634 scope.go:117] "RemoveContainer" containerID="7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.749907 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": container with ID starting with 7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60 not found: ID does not exist" containerID="7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.749971 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} err="failed to get container status \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": rpc error: code = NotFound desc = could not find container \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": container with ID starting with 7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.750011 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.750546 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": container with ID starting with 870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217 not found: ID does not exist" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.750599 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} err="failed to get container status \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": rpc error: code = NotFound desc = could not find container \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": container with ID starting with 870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.750637 4634 scope.go:117] "RemoveContainer" containerID="7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.751001 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": container with ID starting with 7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af not found: ID does not exist" containerID="7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.751046 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} err="failed to get container status \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": rpc error: code = NotFound desc = could not find container \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": container with ID starting with 7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.751079 4634 scope.go:117] "RemoveContainer" containerID="2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.751600 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": container with ID starting with 2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421 not found: ID does not exist" 
containerID="2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.751634 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} err="failed to get container status \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": rpc error: code = NotFound desc = could not find container \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": container with ID starting with 2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.751669 4634 scope.go:117] "RemoveContainer" containerID="03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.752174 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": container with ID starting with 03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778 not found: ID does not exist" containerID="03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.752281 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} err="failed to get container status \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": rpc error: code = NotFound desc = could not find container \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": container with ID starting with 03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.752431 4634 scope.go:117] "RemoveContainer" containerID="4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.752985 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": container with ID starting with 4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee not found: ID does not exist" containerID="4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.753023 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} err="failed to get container status \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": rpc error: code = NotFound desc = could not find container \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": container with ID starting with 4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.753054 4634 scope.go:117] "RemoveContainer" containerID="31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.753527 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": container with ID starting with 31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad not found: ID does not exist" containerID="31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.753577 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} err="failed to get container status \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": rpc error: code = NotFound desc = could not find container \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": container with ID starting with 31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.753607 4634 scope.go:117] "RemoveContainer" containerID="289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.753996 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": container with ID starting with 289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109 not found: ID does not exist" containerID="289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.754044 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} err="failed to get container status \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": rpc error: code = NotFound desc = could not find container \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": container with ID starting with 289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.754073 4634 scope.go:117] "RemoveContainer" containerID="f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" Sep 29 13:55:57 crc kubenswrapper[4634]: E0929 13:55:57.754602 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": container with ID starting with f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422 not found: ID does not exist" containerID="f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.754641 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} err="failed to get container status \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": rpc error: code = NotFound desc = could not find container \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": container with ID starting with f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.754672 4634 scope.go:117] "RemoveContainer" containerID="882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055" Sep 29 13:55:57 crc 
kubenswrapper[4634]: E0929 13:55:57.755024 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": container with ID starting with 882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055 not found: ID does not exist" containerID="882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.755061 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} err="failed to get container status \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": rpc error: code = NotFound desc = could not find container \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": container with ID starting with 882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.755110 4634 scope.go:117] "RemoveContainer" containerID="7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.755433 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} err="failed to get container status \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": rpc error: code = NotFound desc = could not find container \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": container with ID starting with 7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.755467 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.755823 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} err="failed to get container status \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": rpc error: code = NotFound desc = could not find container \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": container with ID starting with 870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.755854 4634 scope.go:117] "RemoveContainer" containerID="7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.756204 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} err="failed to get container status \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": rpc error: code = NotFound desc = could not find container \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": container with ID starting with 7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.756235 4634 scope.go:117] "RemoveContainer" containerID="2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" Sep 29 13:55:57 crc 
kubenswrapper[4634]: I0929 13:55:57.756721 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} err="failed to get container status \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": rpc error: code = NotFound desc = could not find container \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": container with ID starting with 2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.756751 4634 scope.go:117] "RemoveContainer" containerID="03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.757558 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} err="failed to get container status \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": rpc error: code = NotFound desc = could not find container \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": container with ID starting with 03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.757594 4634 scope.go:117] "RemoveContainer" containerID="4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.757958 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} err="failed to get container status \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": rpc error: code = NotFound desc = could not find container \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": container with ID starting with 4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.757987 4634 scope.go:117] "RemoveContainer" containerID="31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.758392 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} err="failed to get container status \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": rpc error: code = NotFound desc = could not find container \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": container with ID starting with 31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.758420 4634 scope.go:117] "RemoveContainer" containerID="289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.758737 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} err="failed to get container status \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": rpc error: code = NotFound desc = could not find container \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": container with ID 
starting with 289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.758775 4634 scope.go:117] "RemoveContainer" containerID="f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.759232 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} err="failed to get container status \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": rpc error: code = NotFound desc = could not find container \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": container with ID starting with f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.759263 4634 scope.go:117] "RemoveContainer" containerID="882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.759547 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} err="failed to get container status \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": rpc error: code = NotFound desc = could not find container \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": container with ID starting with 882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.759582 4634 scope.go:117] "RemoveContainer" containerID="7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.759873 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} err="failed to get container status \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": rpc error: code = NotFound desc = could not find container \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": container with ID starting with 7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.759898 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.760162 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} err="failed to get container status \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": rpc error: code = NotFound desc = could not find container \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": container with ID starting with 870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.760189 4634 scope.go:117] "RemoveContainer" containerID="7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.760496 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} err="failed to get container status \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": rpc error: code = NotFound desc = could not find container \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": container with ID starting with 7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.760530 4634 scope.go:117] "RemoveContainer" containerID="2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.760807 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} err="failed to get container status \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": rpc error: code = NotFound desc = could not find container \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": container with ID starting with 2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.760839 4634 scope.go:117] "RemoveContainer" containerID="03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.761350 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} err="failed to get container status \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": rpc error: code = NotFound desc = could not find container \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": container with ID starting with 03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.761383 4634 scope.go:117] "RemoveContainer" containerID="4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.761742 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} err="failed to get container status \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": rpc error: code = NotFound desc = could not find container \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": container with ID starting with 4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.761794 4634 scope.go:117] "RemoveContainer" containerID="31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.762194 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} err="failed to get container status \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": rpc error: code = NotFound desc = could not find container \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": container with ID starting with 31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad not found: ID does not exist" Sep 
29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.762228 4634 scope.go:117] "RemoveContainer" containerID="289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.762526 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} err="failed to get container status \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": rpc error: code = NotFound desc = could not find container \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": container with ID starting with 289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.762550 4634 scope.go:117] "RemoveContainer" containerID="f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.762815 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} err="failed to get container status \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": rpc error: code = NotFound desc = could not find container \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": container with ID starting with f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.762840 4634 scope.go:117] "RemoveContainer" containerID="882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.763300 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} err="failed to get container status \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": rpc error: code = NotFound desc = could not find container \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": container with ID starting with 882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.763327 4634 scope.go:117] "RemoveContainer" containerID="7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.763782 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60"} err="failed to get container status \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": rpc error: code = NotFound desc = could not find container \"7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60\": container with ID starting with 7505dd70ed4d88a57c808431e0553fa55ba4566e934ac3ab6bec402276a74d60 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.763815 4634 scope.go:117] "RemoveContainer" containerID="870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.764112 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217"} err="failed to get container status 
\"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": rpc error: code = NotFound desc = could not find container \"870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217\": container with ID starting with 870c3a96fd0aad16a338f5555391ff9faa4ef29c222fbec72f5a9bc395293217 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.764139 4634 scope.go:117] "RemoveContainer" containerID="7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.764514 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af"} err="failed to get container status \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": rpc error: code = NotFound desc = could not find container \"7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af\": container with ID starting with 7dcc25f17e450afc26e440b87711586118b917c6d1ea8711a24e3c631aa118af not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.764543 4634 scope.go:117] "RemoveContainer" containerID="2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.764927 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421"} err="failed to get container status \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": rpc error: code = NotFound desc = could not find container \"2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421\": container with ID starting with 2ef4792edf8dbdca5b24209d1db0fd2454b72c741f7140b2cff735c6c2856421 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.764958 4634 scope.go:117] "RemoveContainer" containerID="03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.765269 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778"} err="failed to get container status \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": rpc error: code = NotFound desc = could not find container \"03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778\": container with ID starting with 03b50a5d3c6cff071e2023033fc675fed10f646aafd93406f71ead2468b8d778 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.765330 4634 scope.go:117] "RemoveContainer" containerID="4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.765641 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee"} err="failed to get container status \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": rpc error: code = NotFound desc = could not find container \"4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee\": container with ID starting with 4767f49955707760901886a1a05b3c09a8f090617089238c5e1b6ffb51bd97ee not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.765667 4634 scope.go:117] "RemoveContainer" 
containerID="31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.765923 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad"} err="failed to get container status \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": rpc error: code = NotFound desc = could not find container \"31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad\": container with ID starting with 31b8f29ce38481ff3fb68e4e4b47c3f2508f71987f731e3588be7443fb6fa1ad not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.765951 4634 scope.go:117] "RemoveContainer" containerID="289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.766227 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109"} err="failed to get container status \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": rpc error: code = NotFound desc = could not find container \"289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109\": container with ID starting with 289f54fa074fb6177cb3eafab6d1cd7894a089362a85cd767a71cf136da8f109 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.766256 4634 scope.go:117] "RemoveContainer" containerID="f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.766588 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422"} err="failed to get container status \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": rpc error: code = NotFound desc = could not find container \"f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422\": container with ID starting with f915553a9fdbf11355005a6d7a6545a1a3a6ba47638c566122c5b1d71a4c5422 not found: ID does not exist" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.766614 4634 scope.go:117] "RemoveContainer" containerID="882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055" Sep 29 13:55:57 crc kubenswrapper[4634]: I0929 13:55:57.766876 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055"} err="failed to get container status \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": rpc error: code = NotFound desc = could not find container \"882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055\": container with ID starting with 882ad717104562983393ed9d307ae9af30b776cfe81282a3b46aaf285ba79055 not found: ID does not exist" Sep 29 13:55:58 crc kubenswrapper[4634]: I0929 13:55:58.117743 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65f06677-4cbf-41c9-a0da-02f49710c11c" path="/var/lib/kubelet/pods/65f06677-4cbf-41c9-a0da-02f49710c11c/volumes" Sep 29 13:55:58 crc kubenswrapper[4634]: I0929 13:55:58.507045 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/2.log" Sep 29 13:55:58 crc kubenswrapper[4634]: I0929 13:55:58.510182 4634 generic.go:334] "Generic (PLEG): 
container finished" podID="00bd122b-e43c-45bf-91d3-d7d363892ffb" containerID="3b32266179436a36d9574a4c0da89713f5c6fc9f7ac604f80cf756198993e168" exitCode=0 Sep 29 13:55:58 crc kubenswrapper[4634]: I0929 13:55:58.510220 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerDied","Data":"3b32266179436a36d9574a4c0da89713f5c6fc9f7ac604f80cf756198993e168"} Sep 29 13:55:58 crc kubenswrapper[4634]: I0929 13:55:58.510241 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"a16b486a5230ae33ffb1e284f7bbc0be9b1950d463e631ff51487f35dbec27fd"} Sep 29 13:55:59 crc kubenswrapper[4634]: I0929 13:55:59.520181 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"5b4df33d8ed0a4fb8cec8dbca04a56c3f4f3938f55c076348982ab97bdb13906"} Sep 29 13:55:59 crc kubenswrapper[4634]: I0929 13:55:59.520546 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"f05b888aec8646a6ea22bb20ea9f7e28f6fb4af0b2859c30da368fff5aeeb0a9"} Sep 29 13:55:59 crc kubenswrapper[4634]: I0929 13:55:59.520561 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"e98ca6caf1ddb7bb275dac04afa7de69c882c5193d9c9098db281f6c082f15cf"} Sep 29 13:55:59 crc kubenswrapper[4634]: I0929 13:55:59.520574 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"585149affb2030e67eff87ef399e8b8463d3cf9ac0b884b24781ab63e7b23a80"} Sep 29 13:55:59 crc kubenswrapper[4634]: I0929 13:55:59.520587 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"f1d9428d28fbc7474c709f127ca83fed705a3fd1ae04e391f2301d37749daf32"} Sep 29 13:55:59 crc kubenswrapper[4634]: I0929 13:55:59.520603 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"2cd48671d67791451399470211db7893d09a5e52b0d3ea6e6ce4ed73bc2312d3"} Sep 29 13:56:01 crc kubenswrapper[4634]: I0929 13:56:01.538218 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"3b97137afa4bb0493f53c8d81f6efc5fb4bad1ff25e4f4cd64691b398d2825fe"} Sep 29 13:56:04 crc kubenswrapper[4634]: I0929 13:56:04.561351 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" event={"ID":"00bd122b-e43c-45bf-91d3-d7d363892ffb","Type":"ContainerStarted","Data":"25b0016b17e187192dea007bcb2cf88d50eb40a23d525e9171a73941d05b441c"} Sep 29 13:56:04 crc kubenswrapper[4634]: I0929 13:56:04.561838 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:56:04 crc 
kubenswrapper[4634]: I0929 13:56:04.587148 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:56:04 crc kubenswrapper[4634]: I0929 13:56:04.596417 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" podStartSLOduration=7.5964005839999995 podStartE2EDuration="7.596400584s" podCreationTimestamp="2025-09-29 13:55:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:56:04.587783916 +0000 UTC m=+695.156511665" watchObservedRunningTime="2025-09-29 13:56:04.596400584 +0000 UTC m=+695.165128333" Sep 29 13:56:05 crc kubenswrapper[4634]: I0929 13:56:05.566543 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:56:05 crc kubenswrapper[4634]: I0929 13:56:05.566833 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:56:05 crc kubenswrapper[4634]: I0929 13:56:05.593250 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:56:09 crc kubenswrapper[4634]: I0929 13:56:09.110217 4634 scope.go:117] "RemoveContainer" containerID="639b3b2ff647f9ebecb99109c34868c82c3aeda6e6eb0d1a1abf777bb5bb4643" Sep 29 13:56:09 crc kubenswrapper[4634]: E0929 13:56:09.111672 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-wtnjd_openshift-multus(77b5113e-50cd-417c-8991-cae5cd823f3f)\"" pod="openshift-multus/multus-wtnjd" podUID="77b5113e-50cd-417c-8991-cae5cd823f3f" Sep 29 13:56:14 crc kubenswrapper[4634]: I0929 13:56:14.396239 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:56:14 crc kubenswrapper[4634]: I0929 13:56:14.396547 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:56:23 crc kubenswrapper[4634]: I0929 13:56:23.110760 4634 scope.go:117] "RemoveContainer" containerID="639b3b2ff647f9ebecb99109c34868c82c3aeda6e6eb0d1a1abf777bb5bb4643" Sep 29 13:56:23 crc kubenswrapper[4634]: I0929 13:56:23.678061 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wtnjd_77b5113e-50cd-417c-8991-cae5cd823f3f/kube-multus/2.log" Sep 29 13:56:23 crc kubenswrapper[4634]: I0929 13:56:23.679049 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wtnjd" event={"ID":"77b5113e-50cd-417c-8991-cae5cd823f3f","Type":"ContainerStarted","Data":"30eb00be5cc7b153d48dce5bcaef957a63e47103ffe01195e3f45b63a0fac808"} Sep 29 13:56:27 crc kubenswrapper[4634]: I0929 13:56:27.590625 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-tvbgj" Sep 29 13:56:38 crc 
kubenswrapper[4634]: I0929 13:56:38.303403 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"]
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.304800 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.306677 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.316704 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"]
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.359352 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdx66\" (UniqueName: \"kubernetes.io/projected/16b384c1-46b2-4b51-bf5c-689bc809e5ec-kube-api-access-sdx66\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.359401 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.359439 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.460402 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdx66\" (UniqueName: \"kubernetes.io/projected/16b384c1-46b2-4b51-bf5c-689bc809e5ec-kube-api-access-sdx66\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.460460 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.460488 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.460965 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.463355 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.483363 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdx66\" (UniqueName: \"kubernetes.io/projected/16b384c1-46b2-4b51-bf5c-689bc809e5ec-kube-api-access-sdx66\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.619057 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:38 crc kubenswrapper[4634]: I0929 13:56:38.862474 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"]
Sep 29 13:56:38 crc kubenswrapper[4634]: W0929 13:56:38.867847 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16b384c1_46b2_4b51_bf5c_689bc809e5ec.slice/crio-f7096f9989ecb39ac05a6c2e20a9c3d8d75a91e086ad9528e0d6fb274e983a10 WatchSource:0}: Error finding container f7096f9989ecb39ac05a6c2e20a9c3d8d75a91e086ad9528e0d6fb274e983a10: Status 404 returned error can't find the container with id f7096f9989ecb39ac05a6c2e20a9c3d8d75a91e086ad9528e0d6fb274e983a10
Sep 29 13:56:39 crc kubenswrapper[4634]: I0929 13:56:39.786729 4634 generic.go:334] "Generic (PLEG): container finished" podID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerID="db44367f300c61e2320d3a50e537e6c36de2c8fa68d99db7e9af1725f121df6c" exitCode=0
Sep 29 13:56:39 crc kubenswrapper[4634]: I0929 13:56:39.786840 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4" event={"ID":"16b384c1-46b2-4b51-bf5c-689bc809e5ec","Type":"ContainerDied","Data":"db44367f300c61e2320d3a50e537e6c36de2c8fa68d99db7e9af1725f121df6c"}
Sep 29 13:56:39 crc kubenswrapper[4634]: I0929 13:56:39.787186 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4" event={"ID":"16b384c1-46b2-4b51-bf5c-689bc809e5ec","Type":"ContainerStarted","Data":"f7096f9989ecb39ac05a6c2e20a9c3d8d75a91e086ad9528e0d6fb274e983a10"}
Sep 29 13:56:42 crc kubenswrapper[4634]: I0929 13:56:42.829367 4634 generic.go:334] "Generic (PLEG): container finished" podID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerID="c50388026b68cfa675248569d088dd0f6d149060a066c7b30e18793196b1dab5" exitCode=0
Sep 29 13:56:42 crc kubenswrapper[4634]: I0929 13:56:42.829822 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4" event={"ID":"16b384c1-46b2-4b51-bf5c-689bc809e5ec","Type":"ContainerDied","Data":"c50388026b68cfa675248569d088dd0f6d149060a066c7b30e18793196b1dab5"}
Sep 29 13:56:43 crc kubenswrapper[4634]: I0929 13:56:43.854155 4634 generic.go:334] "Generic (PLEG): container finished" podID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerID="dd7bda168ffb077b390fbfd0b157a5844bf60cfbeb4a6576ec55fc45ced47b9b" exitCode=0
Sep 29 13:56:43 crc kubenswrapper[4634]: I0929 13:56:43.854329 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4" event={"ID":"16b384c1-46b2-4b51-bf5c-689bc809e5ec","Type":"ContainerDied","Data":"dd7bda168ffb077b390fbfd0b157a5844bf60cfbeb4a6576ec55fc45ced47b9b"}
Sep 29 13:56:44 crc kubenswrapper[4634]: I0929 13:56:44.396410 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 13:56:44 crc kubenswrapper[4634]: I0929 13:56:44.396504 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.174299 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.372651 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-util\") pod \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") "
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.372851 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-bundle\") pod \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") "
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.373202 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdx66\" (UniqueName: \"kubernetes.io/projected/16b384c1-46b2-4b51-bf5c-689bc809e5ec-kube-api-access-sdx66\") pod \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\" (UID: \"16b384c1-46b2-4b51-bf5c-689bc809e5ec\") "
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.374585 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-bundle" (OuterVolumeSpecName: "bundle") pod "16b384c1-46b2-4b51-bf5c-689bc809e5ec" (UID: "16b384c1-46b2-4b51-bf5c-689bc809e5ec"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.384430 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16b384c1-46b2-4b51-bf5c-689bc809e5ec-kube-api-access-sdx66" (OuterVolumeSpecName: "kube-api-access-sdx66") pod "16b384c1-46b2-4b51-bf5c-689bc809e5ec" (UID: "16b384c1-46b2-4b51-bf5c-689bc809e5ec"). InnerVolumeSpecName "kube-api-access-sdx66". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.389722 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-util" (OuterVolumeSpecName: "util") pod "16b384c1-46b2-4b51-bf5c-689bc809e5ec" (UID: "16b384c1-46b2-4b51-bf5c-689bc809e5ec"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.475027 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdx66\" (UniqueName: \"kubernetes.io/projected/16b384c1-46b2-4b51-bf5c-689bc809e5ec-kube-api-access-sdx66\") on node \"crc\" DevicePath \"\""
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.475459 4634 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-util\") on node \"crc\" DevicePath \"\""
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.475613 4634 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/16b384c1-46b2-4b51-bf5c-689bc809e5ec-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.874011 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4" event={"ID":"16b384c1-46b2-4b51-bf5c-689bc809e5ec","Type":"ContainerDied","Data":"f7096f9989ecb39ac05a6c2e20a9c3d8d75a91e086ad9528e0d6fb274e983a10"}
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.874116 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7096f9989ecb39ac05a6c2e20a9c3d8d75a91e086ad9528e0d6fb274e983a10"
Sep 29 13:56:45 crc kubenswrapper[4634]: I0929 13:56:45.874146 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.934949 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"]
Sep 29 13:56:49 crc kubenswrapper[4634]: E0929 13:56:49.935402 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerName="pull"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.935412 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerName="pull"
Sep 29 13:56:49 crc kubenswrapper[4634]: E0929 13:56:49.935420 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerName="extract"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.935425 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerName="extract"
Sep 29 13:56:49 crc kubenswrapper[4634]: E0929 13:56:49.935437 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerName="util"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.935443 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerName="util"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.935529 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="16b384c1-46b2-4b51-bf5c-689bc809e5ec" containerName="extract"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.935869 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.937988 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.939441 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.939581 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-sq49c"
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.955855 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"]
Sep 29 13:56:49 crc kubenswrapper[4634]: I0929 13:56:49.956126 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c87jx\" (UniqueName: \"kubernetes.io/projected/805b844b-fda9-431c-a652-d9c6211769f3-kube-api-access-c87jx\") pod \"nmstate-operator-5d6f6cfd66-qnlfb\" (UID: \"805b844b-fda9-431c-a652-d9c6211769f3\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"
Sep 29 13:56:50 crc kubenswrapper[4634]: I0929 13:56:50.057737 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c87jx\" (UniqueName: \"kubernetes.io/projected/805b844b-fda9-431c-a652-d9c6211769f3-kube-api-access-c87jx\") pod \"nmstate-operator-5d6f6cfd66-qnlfb\" (UID: \"805b844b-fda9-431c-a652-d9c6211769f3\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"
Sep 29 13:56:50 crc kubenswrapper[4634]: I0929 13:56:50.088029 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c87jx\" (UniqueName: \"kubernetes.io/projected/805b844b-fda9-431c-a652-d9c6211769f3-kube-api-access-c87jx\") pod \"nmstate-operator-5d6f6cfd66-qnlfb\" (UID: \"805b844b-fda9-431c-a652-d9c6211769f3\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"
Sep 29 13:56:50 crc kubenswrapper[4634]: I0929 13:56:50.253398 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"
Sep 29 13:56:50 crc kubenswrapper[4634]: I0929 13:56:50.450474 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb"]
Sep 29 13:56:50 crc kubenswrapper[4634]: W0929 13:56:50.457385 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod805b844b_fda9_431c_a652_d9c6211769f3.slice/crio-ce759ab7c1146d7de7ce237e697ed476587ede1c750dd5b1837cbf2fb3eed090 WatchSource:0}: Error finding container ce759ab7c1146d7de7ce237e697ed476587ede1c750dd5b1837cbf2fb3eed090: Status 404 returned error can't find the container with id ce759ab7c1146d7de7ce237e697ed476587ede1c750dd5b1837cbf2fb3eed090
Sep 29 13:56:50 crc kubenswrapper[4634]: I0929 13:56:50.910403 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb" event={"ID":"805b844b-fda9-431c-a652-d9c6211769f3","Type":"ContainerStarted","Data":"ce759ab7c1146d7de7ce237e697ed476587ede1c750dd5b1837cbf2fb3eed090"}
Sep 29 13:56:53 crc kubenswrapper[4634]: I0929 13:56:53.936830 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb" event={"ID":"805b844b-fda9-431c-a652-d9c6211769f3","Type":"ContainerStarted","Data":"49827a8e7e2af4383175784650f364f61c6b88db019b8ca263ffe2f6217188e9"}
Sep 29 13:56:53 crc kubenswrapper[4634]: I0929 13:56:53.968678 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-qnlfb" podStartSLOduration=2.386784868 podStartE2EDuration="4.968617302s" podCreationTimestamp="2025-09-29 13:56:49 +0000 UTC" firstStartedPulling="2025-09-29 13:56:50.460769099 +0000 UTC m=+741.029496848" lastFinishedPulling="2025-09-29 13:56:53.042601493 +0000 UTC m=+743.611329282" observedRunningTime="2025-09-29 13:56:53.958460572 +0000 UTC m=+744.527188361" watchObservedRunningTime="2025-09-29 13:56:53.968617302 +0000 UTC m=+744.537345121"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.070408 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.071736 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.074937 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-hkw8n"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.097210 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.097453 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm6kr\" (UniqueName: \"kubernetes.io/projected/b4bdb338-a719-4d83-a12a-f0b18a589d65-kube-api-access-tm6kr\") pod \"nmstate-metrics-58fcddf996-6h8nl\" (UID: \"b4bdb338-a719-4d83-a12a-f0b18a589d65\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.098051 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.104312 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.105170 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.118503 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-przz9"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.119644 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.137810 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.199636 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm6kr\" (UniqueName: \"kubernetes.io/projected/b4bdb338-a719-4d83-a12a-f0b18a589d65-kube-api-access-tm6kr\") pod \"nmstate-metrics-58fcddf996-6h8nl\" (UID: \"b4bdb338-a719-4d83-a12a-f0b18a589d65\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.252322 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm6kr\" (UniqueName: \"kubernetes.io/projected/b4bdb338-a719-4d83-a12a-f0b18a589d65-kube-api-access-tm6kr\") pod \"nmstate-metrics-58fcddf996-6h8nl\" (UID: \"b4bdb338-a719-4d83-a12a-f0b18a589d65\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.301458 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-nmstate-lock\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.301522 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwwrw\" (UniqueName: \"kubernetes.io/projected/a10543ec-dd79-4bc4-9330-ecca62f0dcde-kube-api-access-dwwrw\") pod \"nmstate-webhook-6d689559c5-4hp98\" (UID: \"a10543ec-dd79-4bc4-9330-ecca62f0dcde\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.301545 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-ovs-socket\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.301588 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a10543ec-dd79-4bc4-9330-ecca62f0dcde-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-4hp98\" (UID: \"a10543ec-dd79-4bc4-9330-ecca62f0dcde\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.301664 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-dbus-socket\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.301682 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb8vh\" (UniqueName: \"kubernetes.io/projected/915d1f27-b652-4527-9df6-c1a1ee347d9d-kube-api-access-nb8vh\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.314386 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.315626 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.318334 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.318534 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.318781 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-sx5sn"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.342461 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.394051 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.404535 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-dbus-socket\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.404576 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb8vh\" (UniqueName: \"kubernetes.io/projected/915d1f27-b652-4527-9df6-c1a1ee347d9d-kube-api-access-nb8vh\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.404630 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-nmstate-lock\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.404655 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwwrw\" (UniqueName: \"kubernetes.io/projected/a10543ec-dd79-4bc4-9330-ecca62f0dcde-kube-api-access-dwwrw\") pod \"nmstate-webhook-6d689559c5-4hp98\" (UID: \"a10543ec-dd79-4bc4-9330-ecca62f0dcde\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.404676 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-ovs-socket\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.404709 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a10543ec-dd79-4bc4-9330-ecca62f0dcde-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-4hp98\" (UID: \"a10543ec-dd79-4bc4-9330-ecca62f0dcde\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: E0929 13:56:59.404840 4634 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found
Sep 29 13:56:59 crc kubenswrapper[4634]: E0929 13:56:59.404890 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a10543ec-dd79-4bc4-9330-ecca62f0dcde-tls-key-pair podName:a10543ec-dd79-4bc4-9330-ecca62f0dcde nodeName:}" failed. No retries permitted until 2025-09-29 13:56:59.904872456 +0000 UTC m=+750.473600205 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/a10543ec-dd79-4bc4-9330-ecca62f0dcde-tls-key-pair") pod "nmstate-webhook-6d689559c5-4hp98" (UID: "a10543ec-dd79-4bc4-9330-ecca62f0dcde") : secret "openshift-nmstate-webhook" not found
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.405223 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-dbus-socket\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.405375 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-nmstate-lock\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.405525 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/915d1f27-b652-4527-9df6-c1a1ee347d9d-ovs-socket\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.431784 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwwrw\" (UniqueName: \"kubernetes.io/projected/a10543ec-dd79-4bc4-9330-ecca62f0dcde-kube-api-access-dwwrw\") pod \"nmstate-webhook-6d689559c5-4hp98\" (UID: \"a10543ec-dd79-4bc4-9330-ecca62f0dcde\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.437491 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb8vh\" (UniqueName: \"kubernetes.io/projected/915d1f27-b652-4527-9df6-c1a1ee347d9d-kube-api-access-nb8vh\") pod \"nmstate-handler-przz9\" (UID: \"915d1f27-b652-4527-9df6-c1a1ee347d9d\") " pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.448864 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-przz9"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.507146 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/92541952-adc1-4f55-a7c7-14d68fd9df0d-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.507211 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjfrh\" (UniqueName: \"kubernetes.io/projected/92541952-adc1-4f55-a7c7-14d68fd9df0d-kube-api-access-pjfrh\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.507247 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/92541952-adc1-4f55-a7c7-14d68fd9df0d-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.608444 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/92541952-adc1-4f55-a7c7-14d68fd9df0d-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.608498 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjfrh\" (UniqueName: \"kubernetes.io/projected/92541952-adc1-4f55-a7c7-14d68fd9df0d-kube-api-access-pjfrh\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.608528 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/92541952-adc1-4f55-a7c7-14d68fd9df0d-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: E0929 13:56:59.610211 4634 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found
Sep 29 13:56:59 crc kubenswrapper[4634]: E0929 13:56:59.610282 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/92541952-adc1-4f55-a7c7-14d68fd9df0d-plugin-serving-cert podName:92541952-adc1-4f55-a7c7-14d68fd9df0d nodeName:}" failed. No retries permitted until 2025-09-29 13:57:00.110261784 +0000 UTC m=+750.678989533 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/92541952-adc1-4f55-a7c7-14d68fd9df0d-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-qz252" (UID: "92541952-adc1-4f55-a7c7-14d68fd9df0d") : secret "plugin-serving-cert" not found
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.610626 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/92541952-adc1-4f55-a7c7-14d68fd9df0d-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.641663 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjfrh\" (UniqueName: \"kubernetes.io/projected/92541952-adc1-4f55-a7c7-14d68fd9df0d-kube-api-access-pjfrh\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.652817 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7b4898d7bb-vj5tz"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.653747 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.764217 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b4898d7bb-vj5tz"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.765939 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.782835 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mqhd"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.787359 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" podUID="eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" containerName="route-controller-manager" containerID="cri-o://4742419e87e203babd686c45b7af1120ef78475ae06837a4017280f8fc26ff81" gracePeriod=30
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.788011 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" podUID="a83998e9-b33c-4d62-b1be-c2b8b5e6982d" containerName="controller-manager" containerID="cri-o://2bb07e29ba5627b9cc5e011dae0e58a8a8cba13ca9609481f67f4b81fd35f909" gracePeriod=30
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.810876 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-trusted-ca-bundle\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.810918 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87ffc550-c541-44b6-af21-b57f3894c243-console-serving-cert\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.810946 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-oauth-serving-cert\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.810982 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-service-ca\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.811005 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87ffc550-c541-44b6-af21-b57f3894c243-console-oauth-config\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.811049 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-console-config\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.811075 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq454\" (UniqueName: \"kubernetes.io/projected/87ffc550-c541-44b6-af21-b57f3894c243-kube-api-access-qq454\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.904674 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl"]
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.911967 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-service-ca\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.912000 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87ffc550-c541-44b6-af21-b57f3894c243-console-oauth-config\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.912041 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a10543ec-dd79-4bc4-9330-ecca62f0dcde-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-4hp98\" (UID: \"a10543ec-dd79-4bc4-9330-ecca62f0dcde\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.912071 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-console-config\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.912116 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qq454\" (UniqueName: \"kubernetes.io/projected/87ffc550-c541-44b6-af21-b57f3894c243-kube-api-access-qq454\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.912153 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-trusted-ca-bundle\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.912169 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87ffc550-c541-44b6-af21-b57f3894c243-console-serving-cert\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.912188 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-oauth-serving-cert\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.913156 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-console-config\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.913185 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-oauth-serving-cert\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.914035 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-service-ca\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.914468 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87ffc550-c541-44b6-af21-b57f3894c243-trusted-ca-bundle\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.917610 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/87ffc550-c541-44b6-af21-b57f3894c243-console-oauth-config\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.920832 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/87ffc550-c541-44b6-af21-b57f3894c243-console-serving-cert\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.927676 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a10543ec-dd79-4bc4-9330-ecca62f0dcde-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-4hp98\" (UID: \"a10543ec-dd79-4bc4-9330-ecca62f0dcde\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.937764 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq454\" (UniqueName: \"kubernetes.io/projected/87ffc550-c541-44b6-af21-b57f3894c243-kube-api-access-qq454\") pod \"console-7b4898d7bb-vj5tz\" (UID: \"87ffc550-c541-44b6-af21-b57f3894c243\") " pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.992019 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl" event={"ID":"b4bdb338-a719-4d83-a12a-f0b18a589d65","Type":"ContainerStarted","Data":"0b759198d097078af28215ce418e9c4eead5156e09632facf0eb976f547277de"}
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.993692 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-przz9" event={"ID":"915d1f27-b652-4527-9df6-c1a1ee347d9d","Type":"ContainerStarted","Data":"7650755e254917d510c650c8c827aeb472b46081354afc0d2e7ad44c2d964f54"}
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.994526 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-7b4898d7bb-vj5tz"
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.995169 4634 generic.go:334] "Generic (PLEG): container finished" podID="a83998e9-b33c-4d62-b1be-c2b8b5e6982d" containerID="2bb07e29ba5627b9cc5e011dae0e58a8a8cba13ca9609481f67f4b81fd35f909" exitCode=0
Sep 29 13:56:59 crc kubenswrapper[4634]: I0929 13:56:59.995233 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" event={"ID":"a83998e9-b33c-4d62-b1be-c2b8b5e6982d","Type":"ContainerDied","Data":"2bb07e29ba5627b9cc5e011dae0e58a8a8cba13ca9609481f67f4b81fd35f909"}
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.000455 4634 generic.go:334] "Generic (PLEG): container finished" podID="eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" containerID="4742419e87e203babd686c45b7af1120ef78475ae06837a4017280f8fc26ff81" exitCode=0
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.000480 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" event={"ID":"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a","Type":"ContainerDied","Data":"4742419e87e203babd686c45b7af1120ef78475ae06837a4017280f8fc26ff81"}
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.012287 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.114822 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/92541952-adc1-4f55-a7c7-14d68fd9df0d-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.118829 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/92541952-adc1-4f55-a7c7-14d68fd9df0d-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-qz252\" (UID: \"92541952-adc1-4f55-a7c7-14d68fd9df0d\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.224295 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.260184 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.318360 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-config\") pod \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.318666 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-serving-cert\") pod \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.318701 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-proxy-ca-bundles\") pod \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.318741 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-client-ca\") pod \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.318828 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq5vn\" (UniqueName: \"kubernetes.io/projected/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-kube-api-access-mq5vn\") pod \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\" (UID: \"a83998e9-b33c-4d62-b1be-c2b8b5e6982d\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.320873 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a83998e9-b33c-4d62-b1be-c2b8b5e6982d" (UID: "a83998e9-b33c-4d62-b1be-c2b8b5e6982d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.320925 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-client-ca" (OuterVolumeSpecName: "client-ca") pod "a83998e9-b33c-4d62-b1be-c2b8b5e6982d" (UID: "a83998e9-b33c-4d62-b1be-c2b8b5e6982d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.321081 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-config" (OuterVolumeSpecName: "config") pod "a83998e9-b33c-4d62-b1be-c2b8b5e6982d" (UID: "a83998e9-b33c-4d62-b1be-c2b8b5e6982d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.330159 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"]
Sep 29 13:57:00 crc kubenswrapper[4634]: E0929 13:57:00.330496 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a83998e9-b33c-4d62-b1be-c2b8b5e6982d" containerName="controller-manager"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.330517 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a83998e9-b33c-4d62-b1be-c2b8b5e6982d" containerName="controller-manager"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.338616 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a83998e9-b33c-4d62-b1be-c2b8b5e6982d" containerName="controller-manager"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.342934 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"]
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.351699 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.352554 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a83998e9-b33c-4d62-b1be-c2b8b5e6982d" (UID: "a83998e9-b33c-4d62-b1be-c2b8b5e6982d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.360251 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-kube-api-access-mq5vn" (OuterVolumeSpecName: "kube-api-access-mq5vn") pod "a83998e9-b33c-4d62-b1be-c2b8b5e6982d" (UID: "a83998e9-b33c-4d62-b1be-c2b8b5e6982d"). InnerVolumeSpecName "kube-api-access-mq5vn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.371046 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.420235 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.420612 4634 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.420622 4634 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-client-ca\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.420631 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq5vn\" (UniqueName: \"kubernetes.io/projected/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-kube-api-access-mq5vn\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.420639 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a83998e9-b33c-4d62-b1be-c2b8b5e6982d-config\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521017 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv2wn\" (UniqueName: \"kubernetes.io/projected/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-kube-api-access-wv2wn\") pod \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521096 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-client-ca\") pod \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521155 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-config\") pod \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521242 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-serving-cert\") pod \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\" (UID: \"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a\") "
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521366 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-client-ca\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521399 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-proxy-ca-bundles\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521460 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca218f02-916f-49a2-8185-81885d972eb0-serving-cert\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521493 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-config\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.521516 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4qgz\" (UniqueName: \"kubernetes.io/projected/ca218f02-916f-49a2-8185-81885d972eb0-kube-api-access-r4qgz\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.522653 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-config" (OuterVolumeSpecName: "config") pod "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" (UID: "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.522739 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-client-ca" (OuterVolumeSpecName: "client-ca") pod "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" (UID: "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.528421 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-kube-api-access-wv2wn" (OuterVolumeSpecName: "kube-api-access-wv2wn") pod "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" (UID: "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a"). InnerVolumeSpecName "kube-api-access-wv2wn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.528583 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" (UID: "eaf3daeb-4367-42d3-a1d6-d2b526f3f68a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625754 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca218f02-916f-49a2-8185-81885d972eb0-serving-cert\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625799 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-config\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625825 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4qgz\" (UniqueName: \"kubernetes.io/projected/ca218f02-916f-49a2-8185-81885d972eb0-kube-api-access-r4qgz\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625860 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-client-ca\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625880 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-proxy-ca-bundles\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625913 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-config\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625923 4634 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625933 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv2wn\" (UniqueName: \"kubernetes.io/projected/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-kube-api-access-wv2wn\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.625957 4634 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a-client-ca\") on node \"crc\" DevicePath \"\""
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.627040 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-proxy-ca-bundles\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.627801 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-config\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.627873 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ca218f02-916f-49a2-8185-81885d972eb0-client-ca\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.636004 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca218f02-916f-49a2-8185-81885d972eb0-serving-cert\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.663137 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4qgz\" (UniqueName: \"kubernetes.io/projected/ca218f02-916f-49a2-8185-81885d972eb0-kube-api-access-r4qgz\") pod \"controller-manager-6d8f66f66f-lz7xl\" (UID: \"ca218f02-916f-49a2-8185-81885d972eb0\") " pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.682568 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.694161 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252"]
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.758040 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b4898d7bb-vj5tz"]
Sep 29 13:57:00 crc kubenswrapper[4634]: W0929 13:57:00.779864 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87ffc550_c541_44b6_af21_b57f3894c243.slice/crio-d5eb3b36d18c55601c5cf1a78e1c9562a18ed0b9f7187586472a29242868c93c WatchSource:0}: Error finding container d5eb3b36d18c55601c5cf1a78e1c9562a18ed0b9f7187586472a29242868c93c: Status 404 returned error can't find the container with id d5eb3b36d18c55601c5cf1a78e1c9562a18ed0b9f7187586472a29242868c93c
Sep 29 13:57:00 crc kubenswrapper[4634]: I0929 13:57:00.837079 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-4hp98"]
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.014476 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b4898d7bb-vj5tz" event={"ID":"87ffc550-c541-44b6-af21-b57f3894c243","Type":"ContainerStarted","Data":"47c6372665cf4c5496803aa14a81cbe1f91da4eb4dc416e4b1fedfe90b1df79e"}
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.014959 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b4898d7bb-vj5tz" event={"ID":"87ffc550-c541-44b6-af21-b57f3894c243","Type":"ContainerStarted","Data":"d5eb3b36d18c55601c5cf1a78e1c9562a18ed0b9f7187586472a29242868c93c"}
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.016033 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6" event={"ID":"eaf3daeb-4367-42d3-a1d6-d2b526f3f68a","Type":"ContainerDied","Data":"8565cc7407f9b6b4fb5ed0677b2fb42066f6df09a7975f16f2f80ad35b233ae3"}
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.016066 4634 scope.go:117] "RemoveContainer" containerID="4742419e87e203babd686c45b7af1120ef78475ae06837a4017280f8fc26ff81"
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.016235 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.023662 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98" event={"ID":"a10543ec-dd79-4bc4-9330-ecca62f0dcde","Type":"ContainerStarted","Data":"a046d81807872a43b23044e38e5a57d5ce7b01f23ec9065c497034aa7018100c"}
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.026844 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" event={"ID":"a83998e9-b33c-4d62-b1be-c2b8b5e6982d","Type":"ContainerDied","Data":"65ec871351700ce9e053e5b1c371115dcf89ff825bb9001c6dab7f2ada6956ee"}
Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.026945 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7mqhd" Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.038289 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252" event={"ID":"92541952-adc1-4f55-a7c7-14d68fd9df0d","Type":"ContainerStarted","Data":"40c98469329f59460cfaa8efa13211a0ca470e728f94645e9e10ded902867b5f"} Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.050123 4634 scope.go:117] "RemoveContainer" containerID="2bb07e29ba5627b9cc5e011dae0e58a8a8cba13ca9609481f67f4b81fd35f909" Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.059593 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7b4898d7bb-vj5tz" podStartSLOduration=2.05957014 podStartE2EDuration="2.05957014s" podCreationTimestamp="2025-09-29 13:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:57:01.038643451 +0000 UTC m=+751.607371200" watchObservedRunningTime="2025-09-29 13:57:01.05957014 +0000 UTC m=+751.628297889" Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.079071 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"] Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.081434 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6srb6"] Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.111055 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mqhd"] Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.115860 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7mqhd"] Sep 29 13:57:01 crc kubenswrapper[4634]: I0929 13:57:01.280362 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl"] Sep 29 13:57:01 crc kubenswrapper[4634]: W0929 13:57:01.304724 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca218f02_916f_49a2_8185_81885d972eb0.slice/crio-0bc1259e0cfd8caa5aa6db75ba9c19bbf07b1d2a4e9dc101f3147550779acee9 WatchSource:0}: Error finding container 0bc1259e0cfd8caa5aa6db75ba9c19bbf07b1d2a4e9dc101f3147550779acee9: Status 404 returned error can't find the container with id 0bc1259e0cfd8caa5aa6db75ba9c19bbf07b1d2a4e9dc101f3147550779acee9 Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.051497 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl" event={"ID":"ca218f02-916f-49a2-8185-81885d972eb0","Type":"ContainerStarted","Data":"5c08436d0724c3fa188086fc656fffac08d0fdf8e80a07c46d62cd92e5e7afb2"} Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.052060 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl" event={"ID":"ca218f02-916f-49a2-8185-81885d972eb0","Type":"ContainerStarted","Data":"0bc1259e0cfd8caa5aa6db75ba9c19bbf07b1d2a4e9dc101f3147550779acee9"} Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.052467 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.062509 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.078648 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6d8f66f66f-lz7xl" podStartSLOduration=2.07862103 podStartE2EDuration="2.07862103s" podCreationTimestamp="2025-09-29 13:57:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:57:02.072733738 +0000 UTC m=+752.641461487" watchObservedRunningTime="2025-09-29 13:57:02.07862103 +0000 UTC m=+752.647348779" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.102528 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw"] Sep 29 13:57:02 crc kubenswrapper[4634]: E0929 13:57:02.102821 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" containerName="route-controller-manager" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.102838 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" containerName="route-controller-manager" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.103123 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" containerName="route-controller-manager" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.103530 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.113337 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.113513 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.113623 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.113758 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.113900 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.116917 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.120354 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a83998e9-b33c-4d62-b1be-c2b8b5e6982d" path="/var/lib/kubelet/pods/a83998e9-b33c-4d62-b1be-c2b8b5e6982d/volumes" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.120851 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaf3daeb-4367-42d3-a1d6-d2b526f3f68a" path="/var/lib/kubelet/pods/eaf3daeb-4367-42d3-a1d6-d2b526f3f68a/volumes" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.131633 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw"] Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.199639 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-serving-cert\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.199715 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-client-ca\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.199826 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcskj\" (UniqueName: \"kubernetes.io/projected/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-kube-api-access-gcskj\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.199870 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-config\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.301406 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-client-ca\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.302350 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcskj\" (UniqueName: \"kubernetes.io/projected/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-kube-api-access-gcskj\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.302300 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-client-ca\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.302419 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-config\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.303234 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-config\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.304142 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-serving-cert\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.320016 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcskj\" (UniqueName: \"kubernetes.io/projected/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-kube-api-access-gcskj\") pod \"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.321916 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4-serving-cert\") pod 
\"route-controller-manager-64cd8bf5f-9pmgw\" (UID: \"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4\") " pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.425529 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:02 crc kubenswrapper[4634]: I0929 13:57:02.777357 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw"] Sep 29 13:57:03 crc kubenswrapper[4634]: I0929 13:57:03.923350 4634 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 29 13:57:04 crc kubenswrapper[4634]: I0929 13:57:04.070659 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" event={"ID":"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4","Type":"ContainerStarted","Data":"984dd8008961bdc4ce6cfc633a0cfb25b3b516c6e8b1e12175364c9716b37ad6"} Sep 29 13:57:04 crc kubenswrapper[4634]: I0929 13:57:04.070709 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" event={"ID":"becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4","Type":"ContainerStarted","Data":"a6e7cdfff21707fbee761b738680f458c8629503910dfe5170f52bcf35f1db60"} Sep 29 13:57:04 crc kubenswrapper[4634]: I0929 13:57:04.070874 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:04 crc kubenswrapper[4634]: I0929 13:57:04.072446 4634 patch_prober.go:28] interesting pod/route-controller-manager-64cd8bf5f-9pmgw container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.47:8443/healthz\": dial tcp 10.217.0.47:8443: connect: connection refused" start-of-body= Sep 29 13:57:04 crc kubenswrapper[4634]: I0929 13:57:04.072492 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" podUID="becbc7a2-c9bb-4bf7-90a8-acb75d8d09f4" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.47:8443/healthz\": dial tcp 10.217.0.47:8443: connect: connection refused" Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.077356 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl" event={"ID":"b4bdb338-a719-4d83-a12a-f0b18a589d65","Type":"ContainerStarted","Data":"10044bf25037e42f288a88a570302d8dccf504f79dce86215c8641ed8fcf618c"} Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.079326 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98" event={"ID":"a10543ec-dd79-4bc4-9330-ecca62f0dcde","Type":"ContainerStarted","Data":"6c1ebe4cff8c2feb9952e688996e319c32e4457e7b2449c9122fe1729207e6aa"} Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.079744 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98" Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.080890 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-przz9" 
event={"ID":"915d1f27-b652-4527-9df6-c1a1ee347d9d","Type":"ContainerStarted","Data":"8c71083bee215913be9afd563419118b650fac9e34759112b6b629760c909293"} Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.087622 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.107950 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98" podStartSLOduration=3.067195095 podStartE2EDuration="6.107926345s" podCreationTimestamp="2025-09-29 13:56:59 +0000 UTC" firstStartedPulling="2025-09-29 13:57:00.879847631 +0000 UTC m=+751.448575380" lastFinishedPulling="2025-09-29 13:57:03.920578881 +0000 UTC m=+754.489306630" observedRunningTime="2025-09-29 13:57:05.101545178 +0000 UTC m=+755.670272927" watchObservedRunningTime="2025-09-29 13:57:05.107926345 +0000 UTC m=+755.676654094" Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.109713 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-64cd8bf5f-9pmgw" podStartSLOduration=5.109705844 podStartE2EDuration="5.109705844s" podCreationTimestamp="2025-09-29 13:57:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:57:04.118113821 +0000 UTC m=+754.686841570" watchObservedRunningTime="2025-09-29 13:57:05.109705844 +0000 UTC m=+755.678433593" Sep 29 13:57:05 crc kubenswrapper[4634]: I0929 13:57:05.121274 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-przz9" podStartSLOduration=1.7740672069999999 podStartE2EDuration="6.121248813s" podCreationTimestamp="2025-09-29 13:56:59 +0000 UTC" firstStartedPulling="2025-09-29 13:56:59.513671544 +0000 UTC m=+750.082399293" lastFinishedPulling="2025-09-29 13:57:03.86085315 +0000 UTC m=+754.429580899" observedRunningTime="2025-09-29 13:57:05.117352765 +0000 UTC m=+755.686080514" watchObservedRunningTime="2025-09-29 13:57:05.121248813 +0000 UTC m=+755.689976572" Sep 29 13:57:06 crc kubenswrapper[4634]: I0929 13:57:06.094271 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252" event={"ID":"92541952-adc1-4f55-a7c7-14d68fd9df0d","Type":"ContainerStarted","Data":"c7481b1c8dfb22209e11f93d0b929b6f0a622953aa599ab49f5cc775697b4341"} Sep 29 13:57:06 crc kubenswrapper[4634]: I0929 13:57:06.094859 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-przz9" Sep 29 13:57:06 crc kubenswrapper[4634]: I0929 13:57:06.111252 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-qz252" podStartSLOduration=2.30124111 podStartE2EDuration="7.111232541s" podCreationTimestamp="2025-09-29 13:56:59 +0000 UTC" firstStartedPulling="2025-09-29 13:57:00.732660882 +0000 UTC m=+751.301388631" lastFinishedPulling="2025-09-29 13:57:05.542652313 +0000 UTC m=+756.111380062" observedRunningTime="2025-09-29 13:57:06.110224304 +0000 UTC m=+756.678952053" watchObservedRunningTime="2025-09-29 13:57:06.111232541 +0000 UTC m=+756.679960280" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.107814 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl" event={"ID":"b4bdb338-a719-4d83-a12a-f0b18a589d65","Type":"ContainerStarted","Data":"07eee26e37d375c331b5061d49d5ae163db3d889d6f75bf71e2b93cdc2c563cb"} Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.141974 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-6h8nl" podStartSLOduration=2.131822896 podStartE2EDuration="9.141942319s" podCreationTimestamp="2025-09-29 13:56:59 +0000 UTC" firstStartedPulling="2025-09-29 13:56:59.955191039 +0000 UTC m=+750.523918778" lastFinishedPulling="2025-09-29 13:57:06.965310462 +0000 UTC m=+757.534038201" observedRunningTime="2025-09-29 13:57:08.137916148 +0000 UTC m=+758.706643917" watchObservedRunningTime="2025-09-29 13:57:08.141942319 +0000 UTC m=+758.710670068" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.681279 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vbtv7"] Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.683641 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.698180 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vbtv7"] Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.826328 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-catalog-content\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.826438 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-utilities\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.826470 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k94h5\" (UniqueName: \"kubernetes.io/projected/39522885-6832-4536-ad7c-86e0213573e0-kube-api-access-k94h5\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.927889 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-utilities\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.927958 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k94h5\" (UniqueName: \"kubernetes.io/projected/39522885-6832-4536-ad7c-86e0213573e0-kube-api-access-k94h5\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.927991 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-catalog-content\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.928528 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-catalog-content\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.928676 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-utilities\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:08 crc kubenswrapper[4634]: I0929 13:57:08.957797 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k94h5\" (UniqueName: \"kubernetes.io/projected/39522885-6832-4536-ad7c-86e0213573e0-kube-api-access-k94h5\") pod \"redhat-operators-vbtv7\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:09 crc kubenswrapper[4634]: I0929 13:57:09.014401 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:09 crc kubenswrapper[4634]: I0929 13:57:09.478139 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-przz9" Sep 29 13:57:09 crc kubenswrapper[4634]: I0929 13:57:09.570185 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vbtv7"] Sep 29 13:57:09 crc kubenswrapper[4634]: I0929 13:57:09.994993 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7b4898d7bb-vj5tz" Sep 29 13:57:09 crc kubenswrapper[4634]: I0929 13:57:09.995649 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7b4898d7bb-vj5tz" Sep 29 13:57:10 crc kubenswrapper[4634]: I0929 13:57:10.000014 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7b4898d7bb-vj5tz" Sep 29 13:57:10 crc kubenswrapper[4634]: I0929 13:57:10.122473 4634 generic.go:334] "Generic (PLEG): container finished" podID="39522885-6832-4536-ad7c-86e0213573e0" containerID="39f2151d1503f2eaf2292288944d56afd47463d45d010e3ce308f5eb4c644248" exitCode=0 Sep 29 13:57:10 crc kubenswrapper[4634]: I0929 13:57:10.124064 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbtv7" event={"ID":"39522885-6832-4536-ad7c-86e0213573e0","Type":"ContainerDied","Data":"39f2151d1503f2eaf2292288944d56afd47463d45d010e3ce308f5eb4c644248"} Sep 29 13:57:10 crc kubenswrapper[4634]: I0929 13:57:10.124113 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbtv7" event={"ID":"39522885-6832-4536-ad7c-86e0213573e0","Type":"ContainerStarted","Data":"df03450108bda7381a2570939e42319758d18759f749aef59f66eb29a927c1f9"} Sep 29 13:57:10 crc kubenswrapper[4634]: I0929 13:57:10.147130 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7b4898d7bb-vj5tz" 
Sep 29 13:57:10 crc kubenswrapper[4634]: I0929 13:57:10.236256 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-5nvq7"] Sep 29 13:57:11 crc kubenswrapper[4634]: I0929 13:57:11.131472 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbtv7" event={"ID":"39522885-6832-4536-ad7c-86e0213573e0","Type":"ContainerStarted","Data":"a485306e94400e856b14356c0bfafab100a818370852d22e5be9808d0c70f13c"} Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.142797 4634 generic.go:334] "Generic (PLEG): container finished" podID="39522885-6832-4536-ad7c-86e0213573e0" containerID="a485306e94400e856b14356c0bfafab100a818370852d22e5be9808d0c70f13c" exitCode=0 Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.142899 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbtv7" event={"ID":"39522885-6832-4536-ad7c-86e0213573e0","Type":"ContainerDied","Data":"a485306e94400e856b14356c0bfafab100a818370852d22e5be9808d0c70f13c"} Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.279062 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f6vmr"] Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.285212 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.309970 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6vmr"] Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.386717 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-catalog-content\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.386806 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-utilities\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.386845 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfkfx\" (UniqueName: \"kubernetes.io/projected/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-kube-api-access-kfkfx\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.488453 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-utilities\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.488535 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfkfx\" (UniqueName: \"kubernetes.io/projected/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-kube-api-access-kfkfx\") pod \"community-operators-f6vmr\" (UID: 
\"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.488611 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-catalog-content\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.489203 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-catalog-content\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.489550 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-utilities\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.522143 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfkfx\" (UniqueName: \"kubernetes.io/projected/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-kube-api-access-kfkfx\") pod \"community-operators-f6vmr\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:12 crc kubenswrapper[4634]: I0929 13:57:12.614765 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:13 crc kubenswrapper[4634]: I0929 13:57:13.202406 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbtv7" event={"ID":"39522885-6832-4536-ad7c-86e0213573e0","Type":"ContainerStarted","Data":"38052a34c8ca1fe31485ad0f14e5006123770bb7dd52a67968e7daba0309328f"} Sep 29 13:57:13 crc kubenswrapper[4634]: I0929 13:57:13.239973 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vbtv7" podStartSLOduration=2.784053241 podStartE2EDuration="5.239935482s" podCreationTimestamp="2025-09-29 13:57:08 +0000 UTC" firstStartedPulling="2025-09-29 13:57:10.124757134 +0000 UTC m=+760.693484883" lastFinishedPulling="2025-09-29 13:57:12.580639375 +0000 UTC m=+763.149367124" observedRunningTime="2025-09-29 13:57:13.23265407 +0000 UTC m=+763.801381819" watchObservedRunningTime="2025-09-29 13:57:13.239935482 +0000 UTC m=+763.808663231" Sep 29 13:57:13 crc kubenswrapper[4634]: I0929 13:57:13.244275 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6vmr"] Sep 29 13:57:13 crc kubenswrapper[4634]: W0929 13:57:13.255729 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16e728b3_3cfe_45d1_b9ed_5180687b6d4b.slice/crio-5adcf213fdd6718eda51c1ad4bc116dbc7f39b4a2d5b4a67304995e406afc745 WatchSource:0}: Error finding container 5adcf213fdd6718eda51c1ad4bc116dbc7f39b4a2d5b4a67304995e406afc745: Status 404 returned error can't find the container with id 5adcf213fdd6718eda51c1ad4bc116dbc7f39b4a2d5b4a67304995e406afc745 Sep 29 13:57:14 crc 
kubenswrapper[4634]: I0929 13:57:14.210233 4634 generic.go:334] "Generic (PLEG): container finished" podID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerID="32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9" exitCode=0 Sep 29 13:57:14 crc kubenswrapper[4634]: I0929 13:57:14.210357 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vmr" event={"ID":"16e728b3-3cfe-45d1-b9ed-5180687b6d4b","Type":"ContainerDied","Data":"32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9"} Sep 29 13:57:14 crc kubenswrapper[4634]: I0929 13:57:14.210771 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vmr" event={"ID":"16e728b3-3cfe-45d1-b9ed-5180687b6d4b","Type":"ContainerStarted","Data":"5adcf213fdd6718eda51c1ad4bc116dbc7f39b4a2d5b4a67304995e406afc745"} Sep 29 13:57:14 crc kubenswrapper[4634]: I0929 13:57:14.395829 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:57:14 crc kubenswrapper[4634]: I0929 13:57:14.395933 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:57:14 crc kubenswrapper[4634]: I0929 13:57:14.396007 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 13:57:14 crc kubenswrapper[4634]: I0929 13:57:14.396867 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dbcc2440180d99fb45da22933b773ac34313e312284872b76ccc1d05c2cec895"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 13:57:14 crc kubenswrapper[4634]: I0929 13:57:14.396953 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://dbcc2440180d99fb45da22933b773ac34313e312284872b76ccc1d05c2cec895" gracePeriod=600 Sep 29 13:57:15 crc kubenswrapper[4634]: I0929 13:57:15.217604 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vmr" event={"ID":"16e728b3-3cfe-45d1-b9ed-5180687b6d4b","Type":"ContainerStarted","Data":"2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee"} Sep 29 13:57:15 crc kubenswrapper[4634]: I0929 13:57:15.220228 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="dbcc2440180d99fb45da22933b773ac34313e312284872b76ccc1d05c2cec895" exitCode=0 Sep 29 13:57:15 crc kubenswrapper[4634]: I0929 13:57:15.220267 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" 
event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"dbcc2440180d99fb45da22933b773ac34313e312284872b76ccc1d05c2cec895"} Sep 29 13:57:15 crc kubenswrapper[4634]: I0929 13:57:15.220330 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"66e5f7cddcf0d5a52ca4459df7c3e5983f76e1e654e4c50e5ebc51cf61af5126"} Sep 29 13:57:15 crc kubenswrapper[4634]: I0929 13:57:15.220353 4634 scope.go:117] "RemoveContainer" containerID="612b3e56f8cc9f853bca3919763d42da7657a071824b09305d83106a338a3a6e" Sep 29 13:57:16 crc kubenswrapper[4634]: I0929 13:57:16.228400 4634 generic.go:334] "Generic (PLEG): container finished" podID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerID="2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee" exitCode=0 Sep 29 13:57:16 crc kubenswrapper[4634]: I0929 13:57:16.228480 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vmr" event={"ID":"16e728b3-3cfe-45d1-b9ed-5180687b6d4b","Type":"ContainerDied","Data":"2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee"} Sep 29 13:57:17 crc kubenswrapper[4634]: I0929 13:57:17.242180 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vmr" event={"ID":"16e728b3-3cfe-45d1-b9ed-5180687b6d4b","Type":"ContainerStarted","Data":"0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f"} Sep 29 13:57:17 crc kubenswrapper[4634]: I0929 13:57:17.261035 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f6vmr" podStartSLOduration=2.499954194 podStartE2EDuration="5.261019743s" podCreationTimestamp="2025-09-29 13:57:12 +0000 UTC" firstStartedPulling="2025-09-29 13:57:14.214556324 +0000 UTC m=+764.783284113" lastFinishedPulling="2025-09-29 13:57:16.975621913 +0000 UTC m=+767.544349662" observedRunningTime="2025-09-29 13:57:17.258188135 +0000 UTC m=+767.826915904" watchObservedRunningTime="2025-09-29 13:57:17.261019743 +0000 UTC m=+767.829747492" Sep 29 13:57:19 crc kubenswrapper[4634]: I0929 13:57:19.015118 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:19 crc kubenswrapper[4634]: I0929 13:57:19.015399 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:19 crc kubenswrapper[4634]: I0929 13:57:19.072252 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:19 crc kubenswrapper[4634]: I0929 13:57:19.321323 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:20 crc kubenswrapper[4634]: I0929 13:57:20.017704 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-4hp98" Sep 29 13:57:21 crc kubenswrapper[4634]: I0929 13:57:21.858051 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vbtv7"] Sep 29 13:57:21 crc kubenswrapper[4634]: I0929 13:57:21.858570 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vbtv7" podUID="39522885-6832-4536-ad7c-86e0213573e0" 
containerName="registry-server" containerID="cri-o://38052a34c8ca1fe31485ad0f14e5006123770bb7dd52a67968e7daba0309328f" gracePeriod=2 Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.278584 4634 generic.go:334] "Generic (PLEG): container finished" podID="39522885-6832-4536-ad7c-86e0213573e0" containerID="38052a34c8ca1fe31485ad0f14e5006123770bb7dd52a67968e7daba0309328f" exitCode=0 Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.278632 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbtv7" event={"ID":"39522885-6832-4536-ad7c-86e0213573e0","Type":"ContainerDied","Data":"38052a34c8ca1fe31485ad0f14e5006123770bb7dd52a67968e7daba0309328f"} Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.278662 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbtv7" event={"ID":"39522885-6832-4536-ad7c-86e0213573e0","Type":"ContainerDied","Data":"df03450108bda7381a2570939e42319758d18759f749aef59f66eb29a927c1f9"} Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.278675 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df03450108bda7381a2570939e42319758d18759f749aef59f66eb29a927c1f9" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.298859 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.429125 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-utilities\") pod \"39522885-6832-4536-ad7c-86e0213573e0\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.429168 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k94h5\" (UniqueName: \"kubernetes.io/projected/39522885-6832-4536-ad7c-86e0213573e0-kube-api-access-k94h5\") pod \"39522885-6832-4536-ad7c-86e0213573e0\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.429271 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-catalog-content\") pod \"39522885-6832-4536-ad7c-86e0213573e0\" (UID: \"39522885-6832-4536-ad7c-86e0213573e0\") " Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.431042 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-utilities" (OuterVolumeSpecName: "utilities") pod "39522885-6832-4536-ad7c-86e0213573e0" (UID: "39522885-6832-4536-ad7c-86e0213573e0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.436203 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39522885-6832-4536-ad7c-86e0213573e0-kube-api-access-k94h5" (OuterVolumeSpecName: "kube-api-access-k94h5") pod "39522885-6832-4536-ad7c-86e0213573e0" (UID: "39522885-6832-4536-ad7c-86e0213573e0"). InnerVolumeSpecName "kube-api-access-k94h5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.512810 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39522885-6832-4536-ad7c-86e0213573e0" (UID: "39522885-6832-4536-ad7c-86e0213573e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.530668 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.530698 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k94h5\" (UniqueName: \"kubernetes.io/projected/39522885-6832-4536-ad7c-86e0213573e0-kube-api-access-k94h5\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.530709 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39522885-6832-4536-ad7c-86e0213573e0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.615192 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.616172 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:22 crc kubenswrapper[4634]: I0929 13:57:22.649744 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:23 crc kubenswrapper[4634]: I0929 13:57:23.284712 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbtv7" Sep 29 13:57:23 crc kubenswrapper[4634]: I0929 13:57:23.328645 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vbtv7"] Sep 29 13:57:23 crc kubenswrapper[4634]: I0929 13:57:23.332690 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vbtv7"] Sep 29 13:57:23 crc kubenswrapper[4634]: I0929 13:57:23.361325 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:24 crc kubenswrapper[4634]: I0929 13:57:24.116831 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39522885-6832-4536-ad7c-86e0213573e0" path="/var/lib/kubelet/pods/39522885-6832-4536-ad7c-86e0213573e0/volumes" Sep 29 13:57:25 crc kubenswrapper[4634]: I0929 13:57:25.061224 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6vmr"] Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.300047 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f6vmr" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="registry-server" containerID="cri-o://0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f" gracePeriod=2 Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.758520 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.896943 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfkfx\" (UniqueName: \"kubernetes.io/projected/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-kube-api-access-kfkfx\") pod \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.897055 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-catalog-content\") pod \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.897101 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-utilities\") pod \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\" (UID: \"16e728b3-3cfe-45d1-b9ed-5180687b6d4b\") " Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.897976 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-utilities" (OuterVolumeSpecName: "utilities") pod "16e728b3-3cfe-45d1-b9ed-5180687b6d4b" (UID: "16e728b3-3cfe-45d1-b9ed-5180687b6d4b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.902899 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.903631 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-kube-api-access-kfkfx" (OuterVolumeSpecName: "kube-api-access-kfkfx") pod "16e728b3-3cfe-45d1-b9ed-5180687b6d4b" (UID: "16e728b3-3cfe-45d1-b9ed-5180687b6d4b"). InnerVolumeSpecName "kube-api-access-kfkfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:57:26 crc kubenswrapper[4634]: I0929 13:57:26.945090 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "16e728b3-3cfe-45d1-b9ed-5180687b6d4b" (UID: "16e728b3-3cfe-45d1-b9ed-5180687b6d4b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.004194 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.004239 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfkfx\" (UniqueName: \"kubernetes.io/projected/16e728b3-3cfe-45d1-b9ed-5180687b6d4b-kube-api-access-kfkfx\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.307256 4634 generic.go:334] "Generic (PLEG): container finished" podID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerID="0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f" exitCode=0 Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.307407 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vmr" event={"ID":"16e728b3-3cfe-45d1-b9ed-5180687b6d4b","Type":"ContainerDied","Data":"0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f"} Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.307575 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6vmr" event={"ID":"16e728b3-3cfe-45d1-b9ed-5180687b6d4b","Type":"ContainerDied","Data":"5adcf213fdd6718eda51c1ad4bc116dbc7f39b4a2d5b4a67304995e406afc745"} Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.307596 4634 scope.go:117] "RemoveContainer" containerID="0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.307474 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6vmr" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.335152 4634 scope.go:117] "RemoveContainer" containerID="2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.339205 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6vmr"] Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.345626 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f6vmr"] Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.352251 4634 scope.go:117] "RemoveContainer" containerID="32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.369294 4634 scope.go:117] "RemoveContainer" containerID="0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f" Sep 29 13:57:27 crc kubenswrapper[4634]: E0929 13:57:27.369888 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f\": container with ID starting with 0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f not found: ID does not exist" containerID="0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.369918 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f"} err="failed to get container status \"0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f\": rpc error: code = NotFound desc = could not find container \"0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f\": container with ID starting with 0a4eb7a6d307f123b7689da7c44ca85d7a5590c18b9d3093623b732e51a2247f not found: ID does not exist" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.369940 4634 scope.go:117] "RemoveContainer" containerID="2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee" Sep 29 13:57:27 crc kubenswrapper[4634]: E0929 13:57:27.370239 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee\": container with ID starting with 2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee not found: ID does not exist" containerID="2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.370269 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee"} err="failed to get container status \"2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee\": rpc error: code = NotFound desc = could not find container \"2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee\": container with ID starting with 2ffce0e345b909f3b554a0840ad7a6c5e45c703a9588edfda9d80dd66401e8ee not found: ID does not exist" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.370287 4634 scope.go:117] "RemoveContainer" containerID="32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9" Sep 29 13:57:27 crc kubenswrapper[4634]: E0929 13:57:27.370562 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9\": container with ID starting with 32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9 not found: ID does not exist" containerID="32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9" Sep 29 13:57:27 crc kubenswrapper[4634]: I0929 13:57:27.370615 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9"} err="failed to get container status \"32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9\": rpc error: code = NotFound desc = could not find container \"32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9\": container with ID starting with 32eb47be883c9146aadce3c0a732104883d13a190e783b735bf7edbaa910d1c9 not found: ID does not exist" Sep 29 13:57:28 crc kubenswrapper[4634]: I0929 13:57:28.116659 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" path="/var/lib/kubelet/pods/16e728b3-3cfe-45d1-b9ed-5180687b6d4b/volumes" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.866683 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2"] Sep 29 13:57:33 crc kubenswrapper[4634]: E0929 13:57:33.867517 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39522885-6832-4536-ad7c-86e0213573e0" containerName="extract-content" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867535 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="39522885-6832-4536-ad7c-86e0213573e0" containerName="extract-content" Sep 29 13:57:33 crc kubenswrapper[4634]: E0929 13:57:33.867550 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="extract-content" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867557 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="extract-content" Sep 29 13:57:33 crc kubenswrapper[4634]: E0929 13:57:33.867568 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="registry-server" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867579 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="registry-server" Sep 29 13:57:33 crc kubenswrapper[4634]: E0929 13:57:33.867588 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="extract-utilities" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867595 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="extract-utilities" Sep 29 13:57:33 crc kubenswrapper[4634]: E0929 13:57:33.867607 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39522885-6832-4536-ad7c-86e0213573e0" containerName="registry-server" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867613 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="39522885-6832-4536-ad7c-86e0213573e0" containerName="registry-server" Sep 29 13:57:33 crc kubenswrapper[4634]: E0929 13:57:33.867623 4634 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="39522885-6832-4536-ad7c-86e0213573e0" containerName="extract-utilities" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867629 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="39522885-6832-4536-ad7c-86e0213573e0" containerName="extract-utilities" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867751 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="16e728b3-3cfe-45d1-b9ed-5180687b6d4b" containerName="registry-server" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.867766 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="39522885-6832-4536-ad7c-86e0213573e0" containerName="registry-server" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.868644 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.870846 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.888236 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2"] Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.961313 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.961390 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:33 crc kubenswrapper[4634]: I0929 13:57:33.961577 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jwft\" (UniqueName: \"kubernetes.io/projected/7239db40-db27-43b6-9f27-64800144ed27-kube-api-access-6jwft\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.062496 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jwft\" (UniqueName: \"kubernetes.io/projected/7239db40-db27-43b6-9f27-64800144ed27-kube-api-access-6jwft\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.062561 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: 
\"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.062640 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.063336 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.063435 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.100542 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jwft\" (UniqueName: \"kubernetes.io/projected/7239db40-db27-43b6-9f27-64800144ed27-kube-api-access-6jwft\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.188886 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:34 crc kubenswrapper[4634]: I0929 13:57:34.613358 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2"] Sep 29 13:57:34 crc kubenswrapper[4634]: W0929 13:57:34.620580 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7239db40_db27_43b6_9f27_64800144ed27.slice/crio-0d39dd482a048a8bb626c9e0e477b1f2cab222dbde53cb066f456bebedfac31c WatchSource:0}: Error finding container 0d39dd482a048a8bb626c9e0e477b1f2cab222dbde53cb066f456bebedfac31c: Status 404 returned error can't find the container with id 0d39dd482a048a8bb626c9e0e477b1f2cab222dbde53cb066f456bebedfac31c Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.294310 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-5nvq7" podUID="95f9d479-e9b9-4086-8792-83625bfaff6e" containerName="console" containerID="cri-o://44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337" gracePeriod=15 Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.351547 4634 generic.go:334] "Generic (PLEG): container finished" podID="7239db40-db27-43b6-9f27-64800144ed27" containerID="438e1d969c1c0421c8089e1870d0468830913969a859ebbbd5561d8db01e3598" exitCode=0 Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.351609 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" event={"ID":"7239db40-db27-43b6-9f27-64800144ed27","Type":"ContainerDied","Data":"438e1d969c1c0421c8089e1870d0468830913969a859ebbbd5561d8db01e3598"} Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.351825 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" event={"ID":"7239db40-db27-43b6-9f27-64800144ed27","Type":"ContainerStarted","Data":"0d39dd482a048a8bb626c9e0e477b1f2cab222dbde53cb066f456bebedfac31c"} Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.729912 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-5nvq7_95f9d479-e9b9-4086-8792-83625bfaff6e/console/0.log" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.729982 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.884507 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-trusted-ca-bundle\") pod \"95f9d479-e9b9-4086-8792-83625bfaff6e\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.884666 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdsqg\" (UniqueName: \"kubernetes.io/projected/95f9d479-e9b9-4086-8792-83625bfaff6e-kube-api-access-vdsqg\") pod \"95f9d479-e9b9-4086-8792-83625bfaff6e\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.884755 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-console-config\") pod \"95f9d479-e9b9-4086-8792-83625bfaff6e\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.885467 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "95f9d479-e9b9-4086-8792-83625bfaff6e" (UID: "95f9d479-e9b9-4086-8792-83625bfaff6e"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.885515 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-console-config" (OuterVolumeSpecName: "console-config") pod "95f9d479-e9b9-4086-8792-83625bfaff6e" (UID: "95f9d479-e9b9-4086-8792-83625bfaff6e"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.886196 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-oauth-config\") pod \"95f9d479-e9b9-4086-8792-83625bfaff6e\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.886272 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-service-ca\") pod \"95f9d479-e9b9-4086-8792-83625bfaff6e\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.886296 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-oauth-serving-cert\") pod \"95f9d479-e9b9-4086-8792-83625bfaff6e\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.886330 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-serving-cert\") pod \"95f9d479-e9b9-4086-8792-83625bfaff6e\" (UID: \"95f9d479-e9b9-4086-8792-83625bfaff6e\") " Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.886761 4634 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.886778 4634 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.887463 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-service-ca" (OuterVolumeSpecName: "service-ca") pod "95f9d479-e9b9-4086-8792-83625bfaff6e" (UID: "95f9d479-e9b9-4086-8792-83625bfaff6e"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.887561 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "95f9d479-e9b9-4086-8792-83625bfaff6e" (UID: "95f9d479-e9b9-4086-8792-83625bfaff6e"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.890541 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "95f9d479-e9b9-4086-8792-83625bfaff6e" (UID: "95f9d479-e9b9-4086-8792-83625bfaff6e"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.891320 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95f9d479-e9b9-4086-8792-83625bfaff6e-kube-api-access-vdsqg" (OuterVolumeSpecName: "kube-api-access-vdsqg") pod "95f9d479-e9b9-4086-8792-83625bfaff6e" (UID: "95f9d479-e9b9-4086-8792-83625bfaff6e"). InnerVolumeSpecName "kube-api-access-vdsqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.893106 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "95f9d479-e9b9-4086-8792-83625bfaff6e" (UID: "95f9d479-e9b9-4086-8792-83625bfaff6e"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.988467 4634 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.988501 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdsqg\" (UniqueName: \"kubernetes.io/projected/95f9d479-e9b9-4086-8792-83625bfaff6e-kube-api-access-vdsqg\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.988513 4634 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/95f9d479-e9b9-4086-8792-83625bfaff6e-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.988569 4634 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:35 crc kubenswrapper[4634]: I0929 13:57:35.988579 4634 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/95f9d479-e9b9-4086-8792-83625bfaff6e-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.358371 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-5nvq7_95f9d479-e9b9-4086-8792-83625bfaff6e/console/0.log" Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.359325 4634 generic.go:334] "Generic (PLEG): container finished" podID="95f9d479-e9b9-4086-8792-83625bfaff6e" containerID="44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337" exitCode=2 Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.359388 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5nvq7" event={"ID":"95f9d479-e9b9-4086-8792-83625bfaff6e","Type":"ContainerDied","Data":"44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337"} Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.359409 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-5nvq7" Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.359434 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5nvq7" event={"ID":"95f9d479-e9b9-4086-8792-83625bfaff6e","Type":"ContainerDied","Data":"e49358bc5e094869639e7fc5ffb4945730affc01db25fc24793dd6b87978780f"} Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.359457 4634 scope.go:117] "RemoveContainer" containerID="44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337" Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.381378 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-5nvq7"] Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.388498 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-5nvq7"] Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.399902 4634 scope.go:117] "RemoveContainer" containerID="44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337" Sep 29 13:57:36 crc kubenswrapper[4634]: E0929 13:57:36.401814 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337\": container with ID starting with 44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337 not found: ID does not exist" containerID="44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337" Sep 29 13:57:36 crc kubenswrapper[4634]: I0929 13:57:36.401842 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337"} err="failed to get container status \"44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337\": rpc error: code = NotFound desc = could not find container \"44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337\": container with ID starting with 44214d089bc563978db427885c2976e00054b1845bd4e0542325a65caef37337 not found: ID does not exist" Sep 29 13:57:37 crc kubenswrapper[4634]: I0929 13:57:37.366446 4634 generic.go:334] "Generic (PLEG): container finished" podID="7239db40-db27-43b6-9f27-64800144ed27" containerID="cdeb1e39929450eefb1348329bea53bb71ccaffeb37c91ad0e70182979cb7355" exitCode=0 Sep 29 13:57:37 crc kubenswrapper[4634]: I0929 13:57:37.366548 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" event={"ID":"7239db40-db27-43b6-9f27-64800144ed27","Type":"ContainerDied","Data":"cdeb1e39929450eefb1348329bea53bb71ccaffeb37c91ad0e70182979cb7355"} Sep 29 13:57:38 crc kubenswrapper[4634]: I0929 13:57:38.124380 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95f9d479-e9b9-4086-8792-83625bfaff6e" path="/var/lib/kubelet/pods/95f9d479-e9b9-4086-8792-83625bfaff6e/volumes" Sep 29 13:57:38 crc kubenswrapper[4634]: I0929 13:57:38.376627 4634 generic.go:334] "Generic (PLEG): container finished" podID="7239db40-db27-43b6-9f27-64800144ed27" containerID="824c688f956d4a2026eeb43005e504816fcb17fdbe579b371f6587bf32de7d41" exitCode=0 Sep 29 13:57:38 crc kubenswrapper[4634]: I0929 13:57:38.376691 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" 
event={"ID":"7239db40-db27-43b6-9f27-64800144ed27","Type":"ContainerDied","Data":"824c688f956d4a2026eeb43005e504816fcb17fdbe579b371f6587bf32de7d41"} Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.646836 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.740309 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jwft\" (UniqueName: \"kubernetes.io/projected/7239db40-db27-43b6-9f27-64800144ed27-kube-api-access-6jwft\") pod \"7239db40-db27-43b6-9f27-64800144ed27\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.740370 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-bundle\") pod \"7239db40-db27-43b6-9f27-64800144ed27\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.740460 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-util\") pod \"7239db40-db27-43b6-9f27-64800144ed27\" (UID: \"7239db40-db27-43b6-9f27-64800144ed27\") " Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.742016 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-bundle" (OuterVolumeSpecName: "bundle") pod "7239db40-db27-43b6-9f27-64800144ed27" (UID: "7239db40-db27-43b6-9f27-64800144ed27"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.745551 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7239db40-db27-43b6-9f27-64800144ed27-kube-api-access-6jwft" (OuterVolumeSpecName: "kube-api-access-6jwft") pod "7239db40-db27-43b6-9f27-64800144ed27" (UID: "7239db40-db27-43b6-9f27-64800144ed27"). InnerVolumeSpecName "kube-api-access-6jwft". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.764342 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-util" (OuterVolumeSpecName: "util") pod "7239db40-db27-43b6-9f27-64800144ed27" (UID: "7239db40-db27-43b6-9f27-64800144ed27"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.842233 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jwft\" (UniqueName: \"kubernetes.io/projected/7239db40-db27-43b6-9f27-64800144ed27-kube-api-access-6jwft\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.842264 4634 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:39 crc kubenswrapper[4634]: I0929 13:57:39.842273 4634 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7239db40-db27-43b6-9f27-64800144ed27-util\") on node \"crc\" DevicePath \"\"" Sep 29 13:57:40 crc kubenswrapper[4634]: I0929 13:57:40.389271 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" event={"ID":"7239db40-db27-43b6-9f27-64800144ed27","Type":"ContainerDied","Data":"0d39dd482a048a8bb626c9e0e477b1f2cab222dbde53cb066f456bebedfac31c"} Sep 29 13:57:40 crc kubenswrapper[4634]: I0929 13:57:40.389313 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d39dd482a048a8bb626c9e0e477b1f2cab222dbde53cb066f456bebedfac31c" Sep 29 13:57:40 crc kubenswrapper[4634]: I0929 13:57:40.389387 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.076070 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-64444f645d-qz74c"] Sep 29 13:57:51 crc kubenswrapper[4634]: E0929 13:57:51.076679 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7239db40-db27-43b6-9f27-64800144ed27" containerName="util" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.076691 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7239db40-db27-43b6-9f27-64800144ed27" containerName="util" Sep 29 13:57:51 crc kubenswrapper[4634]: E0929 13:57:51.076700 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7239db40-db27-43b6-9f27-64800144ed27" containerName="pull" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.076706 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7239db40-db27-43b6-9f27-64800144ed27" containerName="pull" Sep 29 13:57:51 crc kubenswrapper[4634]: E0929 13:57:51.076713 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7239db40-db27-43b6-9f27-64800144ed27" containerName="extract" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.076719 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7239db40-db27-43b6-9f27-64800144ed27" containerName="extract" Sep 29 13:57:51 crc kubenswrapper[4634]: E0929 13:57:51.076729 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95f9d479-e9b9-4086-8792-83625bfaff6e" containerName="console" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.076735 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="95f9d479-e9b9-4086-8792-83625bfaff6e" containerName="console" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.076829 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="95f9d479-e9b9-4086-8792-83625bfaff6e" containerName="console" Sep 
29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.076846 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="7239db40-db27-43b6-9f27-64800144ed27" containerName="extract" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.077205 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.080993 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-55mlx" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.081367 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.081415 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.083599 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.083709 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.126059 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-64444f645d-qz74c"] Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.181440 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a7e55c36-0f57-469f-8419-b9ccb4465010-webhook-cert\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.181864 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8djmn\" (UniqueName: \"kubernetes.io/projected/a7e55c36-0f57-469f-8419-b9ccb4465010-kube-api-access-8djmn\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.181927 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a7e55c36-0f57-469f-8419-b9ccb4465010-apiservice-cert\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.283039 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a7e55c36-0f57-469f-8419-b9ccb4465010-webhook-cert\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.283100 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8djmn\" (UniqueName: 
\"kubernetes.io/projected/a7e55c36-0f57-469f-8419-b9ccb4465010-kube-api-access-8djmn\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.283161 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a7e55c36-0f57-469f-8419-b9ccb4465010-apiservice-cert\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.292179 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a7e55c36-0f57-469f-8419-b9ccb4465010-apiservice-cert\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.292179 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a7e55c36-0f57-469f-8419-b9ccb4465010-webhook-cert\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.318926 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8djmn\" (UniqueName: \"kubernetes.io/projected/a7e55c36-0f57-469f-8419-b9ccb4465010-kube-api-access-8djmn\") pod \"metallb-operator-controller-manager-64444f645d-qz74c\" (UID: \"a7e55c36-0f57-469f-8419-b9ccb4465010\") " pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.391465 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.408610 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9"] Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.409346 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.412421 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.412717 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.412891 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-8d9gb" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.439249 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9"] Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.486344 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9n6x\" (UniqueName: \"kubernetes.io/projected/03429314-d17f-4ffa-9d58-b89748690fec-kube-api-access-q9n6x\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.486601 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/03429314-d17f-4ffa-9d58-b89748690fec-webhook-cert\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.486694 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/03429314-d17f-4ffa-9d58-b89748690fec-apiservice-cert\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.589674 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/03429314-d17f-4ffa-9d58-b89748690fec-apiservice-cert\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.589982 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9n6x\" (UniqueName: \"kubernetes.io/projected/03429314-d17f-4ffa-9d58-b89748690fec-kube-api-access-q9n6x\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.590045 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/03429314-d17f-4ffa-9d58-b89748690fec-webhook-cert\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 
13:57:51.594020 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/03429314-d17f-4ffa-9d58-b89748690fec-webhook-cert\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.594313 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/03429314-d17f-4ffa-9d58-b89748690fec-apiservice-cert\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.621817 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9n6x\" (UniqueName: \"kubernetes.io/projected/03429314-d17f-4ffa-9d58-b89748690fec-kube-api-access-q9n6x\") pod \"metallb-operator-webhook-server-5ff45f5c66-t6xh9\" (UID: \"03429314-d17f-4ffa-9d58-b89748690fec\") " pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.761557 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:51 crc kubenswrapper[4634]: I0929 13:57:51.921966 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-64444f645d-qz74c"] Sep 29 13:57:51 crc kubenswrapper[4634]: W0929 13:57:51.930746 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7e55c36_0f57_469f_8419_b9ccb4465010.slice/crio-45f28cf0a7b3898228a13426c1ce0cf6b2384a69779999b486ecae1511ce7593 WatchSource:0}: Error finding container 45f28cf0a7b3898228a13426c1ce0cf6b2384a69779999b486ecae1511ce7593: Status 404 returned error can't find the container with id 45f28cf0a7b3898228a13426c1ce0cf6b2384a69779999b486ecae1511ce7593 Sep 29 13:57:52 crc kubenswrapper[4634]: I0929 13:57:52.220863 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9"] Sep 29 13:57:52 crc kubenswrapper[4634]: W0929 13:57:52.229318 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03429314_d17f_4ffa_9d58_b89748690fec.slice/crio-9ab1c6182996280010b65766e541a36c2c749336d29ff4ed62fc7fffb4bf1351 WatchSource:0}: Error finding container 9ab1c6182996280010b65766e541a36c2c749336d29ff4ed62fc7fffb4bf1351: Status 404 returned error can't find the container with id 9ab1c6182996280010b65766e541a36c2c749336d29ff4ed62fc7fffb4bf1351 Sep 29 13:57:52 crc kubenswrapper[4634]: I0929 13:57:52.485703 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" event={"ID":"a7e55c36-0f57-469f-8419-b9ccb4465010","Type":"ContainerStarted","Data":"45f28cf0a7b3898228a13426c1ce0cf6b2384a69779999b486ecae1511ce7593"} Sep 29 13:57:52 crc kubenswrapper[4634]: I0929 13:57:52.497878 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" 
event={"ID":"03429314-d17f-4ffa-9d58-b89748690fec","Type":"ContainerStarted","Data":"9ab1c6182996280010b65766e541a36c2c749336d29ff4ed62fc7fffb4bf1351"} Sep 29 13:57:57 crc kubenswrapper[4634]: I0929 13:57:57.531074 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" event={"ID":"a7e55c36-0f57-469f-8419-b9ccb4465010","Type":"ContainerStarted","Data":"7892aec59f90620841a00a9c47ff2857f62cc6dd9abd9b61c38e92f1bd5b9ea0"} Sep 29 13:57:57 crc kubenswrapper[4634]: I0929 13:57:57.535661 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" event={"ID":"03429314-d17f-4ffa-9d58-b89748690fec","Type":"ContainerStarted","Data":"114dba046dc34c982cb1c2d744cf121837ec710652d9f39063edab7a87a57ac2"} Sep 29 13:57:57 crc kubenswrapper[4634]: I0929 13:57:57.536323 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:57:57 crc kubenswrapper[4634]: I0929 13:57:57.558762 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" podStartSLOduration=1.446344689 podStartE2EDuration="6.558736187s" podCreationTimestamp="2025-09-29 13:57:51 +0000 UTC" firstStartedPulling="2025-09-29 13:57:52.232763642 +0000 UTC m=+802.801491391" lastFinishedPulling="2025-09-29 13:57:57.34515514 +0000 UTC m=+807.913882889" observedRunningTime="2025-09-29 13:57:57.554784341 +0000 UTC m=+808.123512090" watchObservedRunningTime="2025-09-29 13:57:57.558736187 +0000 UTC m=+808.127463936" Sep 29 13:57:58 crc kubenswrapper[4634]: I0929 13:57:58.568803 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" podStartSLOduration=2.238315196 podStartE2EDuration="7.56878588s" podCreationTimestamp="2025-09-29 13:57:51 +0000 UTC" firstStartedPulling="2025-09-29 13:57:51.933225211 +0000 UTC m=+802.501952960" lastFinishedPulling="2025-09-29 13:57:57.263695895 +0000 UTC m=+807.832423644" observedRunningTime="2025-09-29 13:57:58.568102534 +0000 UTC m=+809.136830283" watchObservedRunningTime="2025-09-29 13:57:58.56878588 +0000 UTC m=+809.137513619" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.209860 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bcw"] Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.211613 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.228593 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bcw"] Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.353481 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn5gs\" (UniqueName: \"kubernetes.io/projected/03983889-374e-4311-bd74-266d223760be-kube-api-access-pn5gs\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.353531 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-catalog-content\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.353612 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-utilities\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.392711 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.455421 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-utilities\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.455507 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn5gs\" (UniqueName: \"kubernetes.io/projected/03983889-374e-4311-bd74-266d223760be-kube-api-access-pn5gs\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.455545 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-catalog-content\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.456359 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-utilities\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.456381 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-catalog-content\") pod \"redhat-marketplace-w4bcw\" (UID: 
\"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.488020 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn5gs\" (UniqueName: \"kubernetes.io/projected/03983889-374e-4311-bd74-266d223760be-kube-api-access-pn5gs\") pod \"redhat-marketplace-w4bcw\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.529988 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:01 crc kubenswrapper[4634]: I0929 13:58:01.851326 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bcw"] Sep 29 13:58:02 crc kubenswrapper[4634]: I0929 13:58:02.575217 4634 generic.go:334] "Generic (PLEG): container finished" podID="03983889-374e-4311-bd74-266d223760be" containerID="97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3" exitCode=0 Sep 29 13:58:02 crc kubenswrapper[4634]: I0929 13:58:02.575338 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bcw" event={"ID":"03983889-374e-4311-bd74-266d223760be","Type":"ContainerDied","Data":"97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3"} Sep 29 13:58:02 crc kubenswrapper[4634]: I0929 13:58:02.575636 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bcw" event={"ID":"03983889-374e-4311-bd74-266d223760be","Type":"ContainerStarted","Data":"ac34a706375f73f49872c5d80913e7209864788e884444840de1bdae4ab7fd72"} Sep 29 13:58:03 crc kubenswrapper[4634]: I0929 13:58:03.588225 4634 generic.go:334] "Generic (PLEG): container finished" podID="03983889-374e-4311-bd74-266d223760be" containerID="e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e" exitCode=0 Sep 29 13:58:03 crc kubenswrapper[4634]: I0929 13:58:03.588264 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bcw" event={"ID":"03983889-374e-4311-bd74-266d223760be","Type":"ContainerDied","Data":"e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e"} Sep 29 13:58:04 crc kubenswrapper[4634]: I0929 13:58:04.595433 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bcw" event={"ID":"03983889-374e-4311-bd74-266d223760be","Type":"ContainerStarted","Data":"980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600"} Sep 29 13:58:11 crc kubenswrapper[4634]: I0929 13:58:11.530281 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:11 crc kubenswrapper[4634]: I0929 13:58:11.530626 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:11 crc kubenswrapper[4634]: I0929 13:58:11.585762 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:11 crc kubenswrapper[4634]: I0929 13:58:11.609494 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w4bcw" podStartSLOduration=9.198835192 podStartE2EDuration="10.609475207s" podCreationTimestamp="2025-09-29 13:58:01 +0000 UTC" 
firstStartedPulling="2025-09-29 13:58:02.577205189 +0000 UTC m=+813.145932958" lastFinishedPulling="2025-09-29 13:58:03.987845214 +0000 UTC m=+814.556572973" observedRunningTime="2025-09-29 13:58:04.634392797 +0000 UTC m=+815.203120546" watchObservedRunningTime="2025-09-29 13:58:11.609475207 +0000 UTC m=+822.178202956" Sep 29 13:58:11 crc kubenswrapper[4634]: I0929 13:58:11.715546 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:11 crc kubenswrapper[4634]: I0929 13:58:11.765874 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" Sep 29 13:58:12 crc kubenswrapper[4634]: I0929 13:58:12.587939 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bcw"] Sep 29 13:58:13 crc kubenswrapper[4634]: I0929 13:58:13.675739 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-w4bcw" podUID="03983889-374e-4311-bd74-266d223760be" containerName="registry-server" containerID="cri-o://980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600" gracePeriod=2 Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.112209 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.230675 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-utilities\") pod \"03983889-374e-4311-bd74-266d223760be\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.230716 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-catalog-content\") pod \"03983889-374e-4311-bd74-266d223760be\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.230741 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pn5gs\" (UniqueName: \"kubernetes.io/projected/03983889-374e-4311-bd74-266d223760be-kube-api-access-pn5gs\") pod \"03983889-374e-4311-bd74-266d223760be\" (UID: \"03983889-374e-4311-bd74-266d223760be\") " Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.232687 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-utilities" (OuterVolumeSpecName: "utilities") pod "03983889-374e-4311-bd74-266d223760be" (UID: "03983889-374e-4311-bd74-266d223760be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.237897 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03983889-374e-4311-bd74-266d223760be-kube-api-access-pn5gs" (OuterVolumeSpecName: "kube-api-access-pn5gs") pod "03983889-374e-4311-bd74-266d223760be" (UID: "03983889-374e-4311-bd74-266d223760be"). InnerVolumeSpecName "kube-api-access-pn5gs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.251724 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03983889-374e-4311-bd74-266d223760be" (UID: "03983889-374e-4311-bd74-266d223760be"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.333970 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.334008 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03983889-374e-4311-bd74-266d223760be-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.334019 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pn5gs\" (UniqueName: \"kubernetes.io/projected/03983889-374e-4311-bd74-266d223760be-kube-api-access-pn5gs\") on node \"crc\" DevicePath \"\"" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.686264 4634 generic.go:334] "Generic (PLEG): container finished" podID="03983889-374e-4311-bd74-266d223760be" containerID="980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600" exitCode=0 Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.686306 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bcw" event={"ID":"03983889-374e-4311-bd74-266d223760be","Type":"ContainerDied","Data":"980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600"} Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.686333 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w4bcw" event={"ID":"03983889-374e-4311-bd74-266d223760be","Type":"ContainerDied","Data":"ac34a706375f73f49872c5d80913e7209864788e884444840de1bdae4ab7fd72"} Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.686349 4634 scope.go:117] "RemoveContainer" containerID="980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.686455 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w4bcw" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.705156 4634 scope.go:117] "RemoveContainer" containerID="e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.741223 4634 scope.go:117] "RemoveContainer" containerID="97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.747871 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bcw"] Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.763506 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-w4bcw"] Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.763746 4634 scope.go:117] "RemoveContainer" containerID="980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600" Sep 29 13:58:14 crc kubenswrapper[4634]: E0929 13:58:14.764351 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600\": container with ID starting with 980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600 not found: ID does not exist" containerID="980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.764418 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600"} err="failed to get container status \"980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600\": rpc error: code = NotFound desc = could not find container \"980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600\": container with ID starting with 980e3d4ece443073442174f733d64b307f513979d0cf8a419b44121de421d600 not found: ID does not exist" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.764535 4634 scope.go:117] "RemoveContainer" containerID="e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e" Sep 29 13:58:14 crc kubenswrapper[4634]: E0929 13:58:14.764911 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e\": container with ID starting with e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e not found: ID does not exist" containerID="e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.765061 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e"} err="failed to get container status \"e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e\": rpc error: code = NotFound desc = could not find container \"e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e\": container with ID starting with e70a6a7b4b2a150dbde1c90ec9aad8f6f9cb839e47afbe9050ec123bde3c159e not found: ID does not exist" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.765191 4634 scope.go:117] "RemoveContainer" containerID="97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3" Sep 29 13:58:14 crc kubenswrapper[4634]: E0929 13:58:14.765513 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3\": container with ID starting with 97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3 not found: ID does not exist" containerID="97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3" Sep 29 13:58:14 crc kubenswrapper[4634]: I0929 13:58:14.765542 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3"} err="failed to get container status \"97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3\": rpc error: code = NotFound desc = could not find container \"97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3\": container with ID starting with 97556f17c0417f0f654145a91e52af619e98b696e77c071442d558f91f7c1bd3 not found: ID does not exist" Sep 29 13:58:16 crc kubenswrapper[4634]: I0929 13:58:16.118815 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03983889-374e-4311-bd74-266d223760be" path="/var/lib/kubelet/pods/03983889-374e-4311-bd74-266d223760be/volumes" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.292500 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fqr45"] Sep 29 13:58:29 crc kubenswrapper[4634]: E0929 13:58:29.293203 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03983889-374e-4311-bd74-266d223760be" containerName="extract-utilities" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.293216 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="03983889-374e-4311-bd74-266d223760be" containerName="extract-utilities" Sep 29 13:58:29 crc kubenswrapper[4634]: E0929 13:58:29.293229 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03983889-374e-4311-bd74-266d223760be" containerName="extract-content" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.293236 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="03983889-374e-4311-bd74-266d223760be" containerName="extract-content" Sep 29 13:58:29 crc kubenswrapper[4634]: E0929 13:58:29.293244 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03983889-374e-4311-bd74-266d223760be" containerName="registry-server" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.293252 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="03983889-374e-4311-bd74-266d223760be" containerName="registry-server" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.293389 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="03983889-374e-4311-bd74-266d223760be" containerName="registry-server" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.294134 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.315179 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fqr45"] Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.440653 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjhq8\" (UniqueName: \"kubernetes.io/projected/dd668355-50fc-4cb3-bc77-fdfc56f545b0-kube-api-access-kjhq8\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.440814 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-catalog-content\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.440915 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-utilities\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.541796 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjhq8\" (UniqueName: \"kubernetes.io/projected/dd668355-50fc-4cb3-bc77-fdfc56f545b0-kube-api-access-kjhq8\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.541851 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-catalog-content\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.541889 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-utilities\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.542593 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-utilities\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.542947 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-catalog-content\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.564484 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kjhq8\" (UniqueName: \"kubernetes.io/projected/dd668355-50fc-4cb3-bc77-fdfc56f545b0-kube-api-access-kjhq8\") pod \"certified-operators-fqr45\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:29 crc kubenswrapper[4634]: I0929 13:58:29.638272 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:30 crc kubenswrapper[4634]: I0929 13:58:30.144695 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fqr45"] Sep 29 13:58:30 crc kubenswrapper[4634]: I0929 13:58:30.793022 4634 generic.go:334] "Generic (PLEG): container finished" podID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerID="d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9" exitCode=0 Sep 29 13:58:30 crc kubenswrapper[4634]: I0929 13:58:30.793150 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqr45" event={"ID":"dd668355-50fc-4cb3-bc77-fdfc56f545b0","Type":"ContainerDied","Data":"d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9"} Sep 29 13:58:30 crc kubenswrapper[4634]: I0929 13:58:30.794267 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqr45" event={"ID":"dd668355-50fc-4cb3-bc77-fdfc56f545b0","Type":"ContainerStarted","Data":"0502c48920d33c30050cb7acb5554846187c47024b305c8db4d90bee7547a61d"} Sep 29 13:58:31 crc kubenswrapper[4634]: I0929 13:58:31.396218 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-64444f645d-qz74c" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.305968 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-mgm82"] Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.307986 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.311073 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.313694 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-qllsn" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.315896 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.336326 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-7772m"] Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.337267 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.340880 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.358730 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-7772m"] Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.437950 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-fbwzx"] Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.458965 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.462695 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.463337 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.463636 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.469400 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-p64lg" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.474343 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-vrscx"] Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.476241 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.494979 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.495793 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-reloader\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.495847 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-metrics\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.495886 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4a4e6c8-9854-47b3-b11e-41a9c78334a8-cert\") pod \"frr-k8s-webhook-server-5478bdb765-7772m\" (UID: \"c4a4e6c8-9854-47b3-b11e-41a9c78334a8\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.495910 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-conf\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 
13:58:32.495927 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrcqz\" (UniqueName: \"kubernetes.io/projected/c4a4e6c8-9854-47b3-b11e-41a9c78334a8-kube-api-access-lrcqz\") pod \"frr-k8s-webhook-server-5478bdb765-7772m\" (UID: \"c4a4e6c8-9854-47b3-b11e-41a9c78334a8\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.495955 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r7zz\" (UniqueName: \"kubernetes.io/projected/f9a02c1e-9d46-46f2-891d-d8b81b95736c-kube-api-access-8r7zz\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.495980 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-sockets\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.496006 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a02c1e-9d46-46f2-891d-d8b81b95736c-metrics-certs\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.496039 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-startup\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.511205 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-vrscx"] Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599779 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96h5g\" (UniqueName: \"kubernetes.io/projected/799ed420-430a-45c8-99a7-de9125bf452d-kube-api-access-96h5g\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599815 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/799ed420-430a-45c8-99a7-de9125bf452d-metrics-certs\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599855 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-metrics-certs\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599879 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: 
\"kubernetes.io/configmap/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-startup\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599918 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtr45\" (UniqueName: \"kubernetes.io/projected/e5394208-75d4-4a32-98c2-16299c7bf5fa-kube-api-access-gtr45\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599938 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-reloader\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599957 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-metrics\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599978 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e5394208-75d4-4a32-98c2-16299c7bf5fa-metallb-excludel2\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.599995 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.600013 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/799ed420-430a-45c8-99a7-de9125bf452d-cert\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.600031 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4a4e6c8-9854-47b3-b11e-41a9c78334a8-cert\") pod \"frr-k8s-webhook-server-5478bdb765-7772m\" (UID: \"c4a4e6c8-9854-47b3-b11e-41a9c78334a8\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.600068 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-conf\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.600097 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrcqz\" (UniqueName: \"kubernetes.io/projected/c4a4e6c8-9854-47b3-b11e-41a9c78334a8-kube-api-access-lrcqz\") pod \"frr-k8s-webhook-server-5478bdb765-7772m\" (UID: \"c4a4e6c8-9854-47b3-b11e-41a9c78334a8\") " 
pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.600120 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r7zz\" (UniqueName: \"kubernetes.io/projected/f9a02c1e-9d46-46f2-891d-d8b81b95736c-kube-api-access-8r7zz\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.600142 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-sockets\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.600161 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a02c1e-9d46-46f2-891d-d8b81b95736c-metrics-certs\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.601182 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-startup\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.601431 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-reloader\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.601623 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-metrics\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.601823 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-conf\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.602567 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/f9a02c1e-9d46-46f2-891d-d8b81b95736c-frr-sockets\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.610928 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f9a02c1e-9d46-46f2-891d-d8b81b95736c-metrics-certs\") pod \"frr-k8s-mgm82\" (UID: \"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.618339 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r7zz\" (UniqueName: \"kubernetes.io/projected/f9a02c1e-9d46-46f2-891d-d8b81b95736c-kube-api-access-8r7zz\") pod \"frr-k8s-mgm82\" (UID: 
\"f9a02c1e-9d46-46f2-891d-d8b81b95736c\") " pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.619878 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrcqz\" (UniqueName: \"kubernetes.io/projected/c4a4e6c8-9854-47b3-b11e-41a9c78334a8-kube-api-access-lrcqz\") pod \"frr-k8s-webhook-server-5478bdb765-7772m\" (UID: \"c4a4e6c8-9854-47b3-b11e-41a9c78334a8\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.626553 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c4a4e6c8-9854-47b3-b11e-41a9c78334a8-cert\") pod \"frr-k8s-webhook-server-5478bdb765-7772m\" (UID: \"c4a4e6c8-9854-47b3-b11e-41a9c78334a8\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.626877 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.654794 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.700760 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e5394208-75d4-4a32-98c2-16299c7bf5fa-metallb-excludel2\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.700806 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.700831 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/799ed420-430a-45c8-99a7-de9125bf452d-cert\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.700880 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96h5g\" (UniqueName: \"kubernetes.io/projected/799ed420-430a-45c8-99a7-de9125bf452d-kube-api-access-96h5g\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.700898 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-metrics-certs\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.700913 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/799ed420-430a-45c8-99a7-de9125bf452d-metrics-certs\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 
13:58:32.700947 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtr45\" (UniqueName: \"kubernetes.io/projected/e5394208-75d4-4a32-98c2-16299c7bf5fa-kube-api-access-gtr45\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.701993 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e5394208-75d4-4a32-98c2-16299c7bf5fa-metallb-excludel2\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: E0929 13:58:32.702054 4634 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 13:58:32 crc kubenswrapper[4634]: E0929 13:58:32.702118 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist podName:e5394208-75d4-4a32-98c2-16299c7bf5fa nodeName:}" failed. No retries permitted until 2025-09-29 13:58:33.202076684 +0000 UTC m=+843.770804433 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist") pod "speaker-fbwzx" (UID: "e5394208-75d4-4a32-98c2-16299c7bf5fa") : secret "metallb-memberlist" not found Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.708684 4634 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.709296 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/799ed420-430a-45c8-99a7-de9125bf452d-metrics-certs\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.714351 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-metrics-certs\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.716711 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/799ed420-430a-45c8-99a7-de9125bf452d-cert\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.737764 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtr45\" (UniqueName: \"kubernetes.io/projected/e5394208-75d4-4a32-98c2-16299c7bf5fa-kube-api-access-gtr45\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:32 crc kubenswrapper[4634]: I0929 13:58:32.737825 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96h5g\" (UniqueName: \"kubernetes.io/projected/799ed420-430a-45c8-99a7-de9125bf452d-kube-api-access-96h5g\") pod \"controller-5d688f5ffc-vrscx\" (UID: \"799ed420-430a-45c8-99a7-de9125bf452d\") " pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:32 crc 
kubenswrapper[4634]: I0929 13:58:32.840693 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-vrscx"
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.127785 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-7772m"]
Sep 29 13:58:33 crc kubenswrapper[4634]: W0929 13:58:33.135628 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4a4e6c8_9854_47b3_b11e_41a9c78334a8.slice/crio-6fd4c198aa3fe8c829ceb4b3e994ad6f8a5abb457b71b3fcb14c37b686187804 WatchSource:0}: Error finding container 6fd4c198aa3fe8c829ceb4b3e994ad6f8a5abb457b71b3fcb14c37b686187804: Status 404 returned error can't find the container with id 6fd4c198aa3fe8c829ceb4b3e994ad6f8a5abb457b71b3fcb14c37b686187804
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.209582 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx"
Sep 29 13:58:33 crc kubenswrapper[4634]: E0929 13:58:33.210638 4634 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
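
speaker-fbwzx cannot mount its memberlist volume while the metallb-memberlist secret does not exist yet, and each failure is rescheduled with a doubling delay: durationBeforeRetry was 500ms at 13:58:32.702118 above, the entry just below shows 1s, and the SetUp at 13:58:34 finally succeeds once the operator has created the secret. A sketch of that doubling-backoff loop in Go (mountWithBackoff and the retry cap are illustrative names and bounds; the kubelet keeps this state in its nested pending operations):

package main

import (
	"errors"
	"fmt"
	"time"
)

var errSecretMissing = errors.New(`secret "metallb-memberlist" not found`)

// mountWithBackoff retries a failing SetUp with a doubling delay, the same
// 500ms -> 1s progression visible in the durationBeforeRetry fields.
func mountWithBackoff(setUp func() error, maxRetries int) error {
	delay := 500 * time.Millisecond
	var err error
	for attempt := 0; attempt <= maxRetries; attempt++ {
		if err = setUp(); err == nil {
			return nil
		}
		fmt.Printf("failed. No retries permitted until %s (durationBeforeRetry %s). Error: %v\n",
			time.Now().Add(delay).Format(time.RFC3339), delay, err)
		time.Sleep(delay)
		delay *= 2
	}
	return err
}

func main() {
	attempts := 0
	setUp := func() error {
		attempts++
		if attempts < 3 { // the secret "appears" on the third try,
			return errSecretMissing // as it did once the operator created it
		}
		return nil
	}
	if err := mountWithBackoff(setUp, 10); err != nil {
		panic(err)
	}
	fmt.Printf("MountVolume.SetUp succeeded after %d attempts\n", attempts)
}
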
Sep 29 13:58:32 crc kubenswrapper[4634]: E0929 13:58:33.210682 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist podName:e5394208-75d4-4a32-98c2-16299c7bf5fa nodeName:}" failed. No retries permitted until 2025-09-29 13:58:34.210668512 +0000 UTC m=+844.779396261 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist") pod "speaker-fbwzx" (UID: "e5394208-75d4-4a32-98c2-16299c7bf5fa") : secret "metallb-memberlist" not found
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.275306 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-vrscx"]
Sep 29 13:58:33 crc kubenswrapper[4634]: W0929 13:58:33.287067 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod799ed420_430a_45c8_99a7_de9125bf452d.slice/crio-224aab13ea50f46e6156349b698dcc24d37a4c7bfddc75c0654b6c207ad49669 WatchSource:0}: Error finding container 224aab13ea50f46e6156349b698dcc24d37a4c7bfddc75c0654b6c207ad49669: Status 404 returned error can't find the container with id 224aab13ea50f46e6156349b698dcc24d37a4c7bfddc75c0654b6c207ad49669
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.830767 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-vrscx" event={"ID":"799ed420-430a-45c8-99a7-de9125bf452d","Type":"ContainerStarted","Data":"1c74b793a24c58caef70c4707692c084e9864852b7b2d09ffbc9d6a4781ea425"}
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.830806 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-vrscx" event={"ID":"799ed420-430a-45c8-99a7-de9125bf452d","Type":"ContainerStarted","Data":"b779fab24fc8d5039331bc2f904717f1506de2c7450012d227aaff2ed1f9a4c1"}
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.830817 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-vrscx" event={"ID":"799ed420-430a-45c8-99a7-de9125bf452d","Type":"ContainerStarted","Data":"224aab13ea50f46e6156349b698dcc24d37a4c7bfddc75c0654b6c207ad49669"}
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.831828 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-vrscx"
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.833270 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerStarted","Data":"b95ce24ee1bec7f0952e2df4270e4bf2dbede37fb88ef86d8aefdf723fa339bd"}
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.835297 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" event={"ID":"c4a4e6c8-9854-47b3-b11e-41a9c78334a8","Type":"ContainerStarted","Data":"6fd4c198aa3fe8c829ceb4b3e994ad6f8a5abb457b71b3fcb14c37b686187804"}
Sep 29 13:58:33 crc kubenswrapper[4634]: I0929 13:58:33.853825 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-vrscx" podStartSLOduration=1.853809432 podStartE2EDuration="1.853809432s" podCreationTimestamp="2025-09-29 13:58:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:58:33.848959904 +0000 UTC m=+844.417687653" watchObservedRunningTime="2025-09-29 13:58:33.853809432 +0000 UTC m=+844.422537181"
Sep 29 13:58:34 crc kubenswrapper[4634]: I0929 13:58:34.223104 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist\") pod
\"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:34 crc kubenswrapper[4634]: I0929 13:58:34.229400 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e5394208-75d4-4a32-98c2-16299c7bf5fa-memberlist\") pod \"speaker-fbwzx\" (UID: \"e5394208-75d4-4a32-98c2-16299c7bf5fa\") " pod="metallb-system/speaker-fbwzx" Sep 29 13:58:34 crc kubenswrapper[4634]: I0929 13:58:34.272498 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-fbwzx" Sep 29 13:58:37 crc kubenswrapper[4634]: I0929 13:58:37.860344 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fbwzx" event={"ID":"e5394208-75d4-4a32-98c2-16299c7bf5fa","Type":"ContainerStarted","Data":"8d480bd3a4e004f06bf4f88acfbc59b6246037c9802392afe6e17a6a5e46bbf5"} Sep 29 13:58:37 crc kubenswrapper[4634]: I0929 13:58:37.860881 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fbwzx" event={"ID":"e5394208-75d4-4a32-98c2-16299c7bf5fa","Type":"ContainerStarted","Data":"6a442de6c3897981f94328b6df91987df5df619a784e63cdd887c32c3205f143"} Sep 29 13:58:37 crc kubenswrapper[4634]: I0929 13:58:37.860891 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-fbwzx" event={"ID":"e5394208-75d4-4a32-98c2-16299c7bf5fa","Type":"ContainerStarted","Data":"0fc3144c4f5526f6544f68c8c12333b712b90bbfb35ef0293b3a55607388138f"} Sep 29 13:58:37 crc kubenswrapper[4634]: I0929 13:58:37.861081 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-fbwzx" Sep 29 13:58:37 crc kubenswrapper[4634]: I0929 13:58:37.861960 4634 generic.go:334] "Generic (PLEG): container finished" podID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerID="4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60" exitCode=0 Sep 29 13:58:37 crc kubenswrapper[4634]: I0929 13:58:37.862026 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqr45" event={"ID":"dd668355-50fc-4cb3-bc77-fdfc56f545b0","Type":"ContainerDied","Data":"4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60"} Sep 29 13:58:37 crc kubenswrapper[4634]: I0929 13:58:37.879703 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-fbwzx" podStartSLOduration=5.879686502 podStartE2EDuration="5.879686502s" podCreationTimestamp="2025-09-29 13:58:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 13:58:37.878909833 +0000 UTC m=+848.447637572" watchObservedRunningTime="2025-09-29 13:58:37.879686502 +0000 UTC m=+848.448414251" Sep 29 13:58:38 crc kubenswrapper[4634]: I0929 13:58:38.873281 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqr45" event={"ID":"dd668355-50fc-4cb3-bc77-fdfc56f545b0","Type":"ContainerStarted","Data":"eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5"} Sep 29 13:58:39 crc kubenswrapper[4634]: I0929 13:58:39.640348 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:39 crc kubenswrapper[4634]: I0929 13:58:39.640671 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 
13:58:40 crc kubenswrapper[4634]: I0929 13:58:40.129712 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fqr45" podStartSLOduration=3.64135482 podStartE2EDuration="11.129676262s" podCreationTimestamp="2025-09-29 13:58:29 +0000 UTC" firstStartedPulling="2025-09-29 13:58:30.795812514 +0000 UTC m=+841.364540273" lastFinishedPulling="2025-09-29 13:58:38.284133966 +0000 UTC m=+848.852861715" observedRunningTime="2025-09-29 13:58:38.893995309 +0000 UTC m=+849.462723068" watchObservedRunningTime="2025-09-29 13:58:40.129676262 +0000 UTC m=+850.698404011"
Sep 29 13:58:40 crc kubenswrapper[4634]: I0929 13:58:40.701394 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-fqr45" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="registry-server" probeResult="failure" output=<
Sep 29 13:58:40 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s
Sep 29 13:58:40 crc kubenswrapper[4634]: >
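
The startup probe for certified-operators-fqr45 fails because nothing is listening on the registry-server's gRPC port yet: the probe gives up if it cannot connect to ":50051" within one second, and the kubelet records one failed attempt (the pod recovers at 13:58:49 below). A rough TCP-level analogue in Go; the actual probe is a gRPC health check bundled in the catalog image, so probeOnce here only reproduces the connect-within-1s behaviour:

package main

import (
	"fmt"
	"net"
	"os"
	"time"
)

// probeOnce must open a connection to the registry's port within the
// timeout. Address and timeout are taken from the probe output above.
func probeOnce(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within %s", addr, timeout)
	}
	return conn.Close()
}

func main() {
	if err := probeOnce(":50051", 1*time.Second); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1) // non-zero exit marks one failed probe attempt, as logged
	}
}
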
containerID="7da866a34f025abe39f62925decce49b5b0b830d09d657e43b96c0a837b8c868" exitCode=0 Sep 29 13:58:45 crc kubenswrapper[4634]: I0929 13:58:45.951464 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerDied","Data":"7da866a34f025abe39f62925decce49b5b0b830d09d657e43b96c0a837b8c868"} Sep 29 13:58:46 crc kubenswrapper[4634]: I0929 13:58:46.959538 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerStarted","Data":"39e25c186908349b0ac5caa2ae729a494acf8de4dab03f59abf1ea9062c27c04"} Sep 29 13:58:46 crc kubenswrapper[4634]: I0929 13:58:46.959966 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerStarted","Data":"312ae3407250d7af63f1281449d744d7f0416e3338628d26d3234e65bc43f7ea"} Sep 29 13:58:46 crc kubenswrapper[4634]: I0929 13:58:46.959984 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerStarted","Data":"4efa79b82fffbd8601dd620a4813eb5e804382b7755454c655bb70baa590a3b1"} Sep 29 13:58:46 crc kubenswrapper[4634]: I0929 13:58:46.959996 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerStarted","Data":"3cb584dbbfb7861619d3b5c12971c40129bea23df2f539c9e9d4d1368f85acd7"} Sep 29 13:58:46 crc kubenswrapper[4634]: I0929 13:58:46.960008 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerStarted","Data":"b5d09ebc25fbc190e030890272f3c03ad20d4bb716b2f334ff4d88a07b71058b"} Sep 29 13:58:47 crc kubenswrapper[4634]: I0929 13:58:47.974879 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mgm82" event={"ID":"f9a02c1e-9d46-46f2-891d-d8b81b95736c","Type":"ContainerStarted","Data":"6924e6a7efce978a9f03600c19722698c80f5ccb14128e87984b026c28b2a652"} Sep 29 13:58:47 crc kubenswrapper[4634]: I0929 13:58:47.975151 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:48 crc kubenswrapper[4634]: I0929 13:58:48.008021 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-mgm82" podStartSLOduration=5.434977017 podStartE2EDuration="16.008002253s" podCreationTimestamp="2025-09-29 13:58:32 +0000 UTC" firstStartedPulling="2025-09-29 13:58:32.863706602 +0000 UTC m=+843.432434351" lastFinishedPulling="2025-09-29 13:58:43.436731808 +0000 UTC m=+854.005459587" observedRunningTime="2025-09-29 13:58:48.00324263 +0000 UTC m=+858.571970399" watchObservedRunningTime="2025-09-29 13:58:48.008002253 +0000 UTC m=+858.576730022" Sep 29 13:58:49 crc kubenswrapper[4634]: I0929 13:58:49.711306 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:49 crc kubenswrapper[4634]: I0929 13:58:49.764055 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 13:58:49 crc kubenswrapper[4634]: I0929 13:58:49.844603 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/certified-operators-fqr45"] Sep 29 13:58:49 crc kubenswrapper[4634]: I0929 13:58:49.955024 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tw5l9"] Sep 29 13:58:49 crc kubenswrapper[4634]: I0929 13:58:49.955357 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tw5l9" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="registry-server" containerID="cri-o://0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2" gracePeriod=2 Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.405529 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.585838 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb57n\" (UniqueName: \"kubernetes.io/projected/df7b8866-1de8-4b2e-a301-591d5abfd01a-kube-api-access-rb57n\") pod \"df7b8866-1de8-4b2e-a301-591d5abfd01a\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.585934 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-utilities\") pod \"df7b8866-1de8-4b2e-a301-591d5abfd01a\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.585983 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-catalog-content\") pod \"df7b8866-1de8-4b2e-a301-591d5abfd01a\" (UID: \"df7b8866-1de8-4b2e-a301-591d5abfd01a\") " Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.591136 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-utilities" (OuterVolumeSpecName: "utilities") pod "df7b8866-1de8-4b2e-a301-591d5abfd01a" (UID: "df7b8866-1de8-4b2e-a301-591d5abfd01a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.598352 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df7b8866-1de8-4b2e-a301-591d5abfd01a-kube-api-access-rb57n" (OuterVolumeSpecName: "kube-api-access-rb57n") pod "df7b8866-1de8-4b2e-a301-591d5abfd01a" (UID: "df7b8866-1de8-4b2e-a301-591d5abfd01a"). InnerVolumeSpecName "kube-api-access-rb57n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.636440 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df7b8866-1de8-4b2e-a301-591d5abfd01a" (UID: "df7b8866-1de8-4b2e-a301-591d5abfd01a"). InnerVolumeSpecName "catalog-content". 
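
As with redhat-marketplace-w4bcw earlier, deleting the pod makes the kubelet stop the registry-server container with a two-second grace period, which the runtime enforces as a polite stop followed by a forced kill once the timeout expires. A compact Go sketch of handing that grace period to a CRI-style StopContainer; stopper, killWithGracePeriod and fakeRuntime are illustrative stand-ins, and the extra context margin is an assumption rather than documented kubelet behaviour:

package main

import (
	"context"
	"fmt"
	"time"
)

// stopper stands in for the CRI runtime's StopContainer call, whose timeout
// argument carries the grace period: the runtime signals the container to
// stop and force-kills it once the timeout has elapsed.
type stopper interface {
	StopContainer(ctx context.Context, containerID string, timeoutSeconds int64) error
}

func killWithGracePeriod(ctx context.Context, rt stopper, id string, grace int64) error {
	fmt.Printf("Killing container with a grace period containerID=%q gracePeriod=%d\n", id, grace)
	// Bound the whole call slightly beyond the grace period so a wedged
	// runtime cannot stall pod teardown forever (illustrative margin).
	ctx, cancel := context.WithTimeout(ctx, time.Duration(grace+2)*time.Second)
	defer cancel()
	return rt.StopContainer(ctx, id, grace)
}

type fakeRuntime struct{}

func (fakeRuntime) StopContainer(context.Context, string, int64) error {
	return nil // pretend the container exited within the grace period
}

func main() {
	_ = killWithGracePeriod(context.Background(), fakeRuntime{},
		"cri-o://0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2", 2)
}
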
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.687251 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.687281 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df7b8866-1de8-4b2e-a301-591d5abfd01a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 13:58:50 crc kubenswrapper[4634]: I0929 13:58:50.687294 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb57n\" (UniqueName: \"kubernetes.io/projected/df7b8866-1de8-4b2e-a301-591d5abfd01a-kube-api-access-rb57n\") on node \"crc\" DevicePath \"\"" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.006107 4634 generic.go:334] "Generic (PLEG): container finished" podID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerID="0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2" exitCode=0 Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.008050 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tw5l9" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.018304 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tw5l9" event={"ID":"df7b8866-1de8-4b2e-a301-591d5abfd01a","Type":"ContainerDied","Data":"0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2"} Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.018414 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tw5l9" event={"ID":"df7b8866-1de8-4b2e-a301-591d5abfd01a","Type":"ContainerDied","Data":"ed2b72ec748580aede1b76dce92a9e5486f838cb1e712ccf44240e549268c9c4"} Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.018477 4634 scope.go:117] "RemoveContainer" containerID="0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.045451 4634 scope.go:117] "RemoveContainer" containerID="e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.087920 4634 scope.go:117] "RemoveContainer" containerID="07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.088068 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tw5l9"] Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.096153 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tw5l9"] Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.112315 4634 scope.go:117] "RemoveContainer" containerID="0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2" Sep 29 13:58:51 crc kubenswrapper[4634]: E0929 13:58:51.112802 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2\": container with ID starting with 0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2 not found: ID does not exist" containerID="0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.112850 
4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2"} err="failed to get container status \"0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2\": rpc error: code = NotFound desc = could not find container \"0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2\": container with ID starting with 0f8418c6cb3d56ecb8dec9a6c0e0636d0ee6cb76499e801c0571190903d948e2 not found: ID does not exist" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.112879 4634 scope.go:117] "RemoveContainer" containerID="e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975" Sep 29 13:58:51 crc kubenswrapper[4634]: E0929 13:58:51.113881 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975\": container with ID starting with e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975 not found: ID does not exist" containerID="e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.113910 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975"} err="failed to get container status \"e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975\": rpc error: code = NotFound desc = could not find container \"e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975\": container with ID starting with e528ea6788bf927cfaf403add0b51665394fe2dfd15ccebb3babc9b47dfb8975 not found: ID does not exist" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.113926 4634 scope.go:117] "RemoveContainer" containerID="07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e" Sep 29 13:58:51 crc kubenswrapper[4634]: E0929 13:58:51.114375 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e\": container with ID starting with 07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e not found: ID does not exist" containerID="07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e" Sep 29 13:58:51 crc kubenswrapper[4634]: I0929 13:58:51.114426 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e"} err="failed to get container status \"07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e\": rpc error: code = NotFound desc = could not find container \"07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e\": container with ID starting with 07b12ef495b714c8a6aac0d6cc09484700f196d4e37e2ad5af819a4f702b755e not found: ID does not exist" Sep 29 13:58:52 crc kubenswrapper[4634]: I0929 13:58:52.118271 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" path="/var/lib/kubelet/pods/df7b8866-1de8-4b2e-a301-591d5abfd01a/volumes" Sep 29 13:58:52 crc kubenswrapper[4634]: I0929 13:58:52.627604 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:52 crc kubenswrapper[4634]: I0929 13:58:52.693212 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="metallb-system/frr-k8s-mgm82" Sep 29 13:58:52 crc kubenswrapper[4634]: I0929 13:58:52.849958 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-vrscx" Sep 29 13:58:54 crc kubenswrapper[4634]: I0929 13:58:54.280166 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-fbwzx" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.780042 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-p7lbq"] Sep 29 13:58:57 crc kubenswrapper[4634]: E0929 13:58:57.780741 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="extract-utilities" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.780764 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="extract-utilities" Sep 29 13:58:57 crc kubenswrapper[4634]: E0929 13:58:57.780787 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="extract-content" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.780797 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="extract-content" Sep 29 13:58:57 crc kubenswrapper[4634]: E0929 13:58:57.780823 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="registry-server" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.780836 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="registry-server" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.781003 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="df7b8866-1de8-4b2e-a301-591d5abfd01a" containerName="registry-server" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.781595 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-p7lbq" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.785969 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.786237 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.785975 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-j2w5r" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.805435 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8fd6\" (UniqueName: \"kubernetes.io/projected/9b83f594-0978-4c2b-9a19-df52826ee516-kube-api-access-q8fd6\") pod \"openstack-operator-index-p7lbq\" (UID: \"9b83f594-0978-4c2b-9a19-df52826ee516\") " pod="openstack-operators/openstack-operator-index-p7lbq" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.821233 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-p7lbq"] Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.906825 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8fd6\" (UniqueName: \"kubernetes.io/projected/9b83f594-0978-4c2b-9a19-df52826ee516-kube-api-access-q8fd6\") pod \"openstack-operator-index-p7lbq\" (UID: \"9b83f594-0978-4c2b-9a19-df52826ee516\") " pod="openstack-operators/openstack-operator-index-p7lbq" Sep 29 13:58:57 crc kubenswrapper[4634]: I0929 13:58:57.925758 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8fd6\" (UniqueName: \"kubernetes.io/projected/9b83f594-0978-4c2b-9a19-df52826ee516-kube-api-access-q8fd6\") pod \"openstack-operator-index-p7lbq\" (UID: \"9b83f594-0978-4c2b-9a19-df52826ee516\") " pod="openstack-operators/openstack-operator-index-p7lbq" Sep 29 13:58:58 crc kubenswrapper[4634]: I0929 13:58:58.101526 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-p7lbq" Sep 29 13:58:58 crc kubenswrapper[4634]: I0929 13:58:58.552998 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-p7lbq"] Sep 29 13:58:58 crc kubenswrapper[4634]: W0929 13:58:58.554818 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b83f594_0978_4c2b_9a19_df52826ee516.slice/crio-9953626d1d36402053be051c7c30e0f3e768eaf7630d57f4cba557d746e3f5be WatchSource:0}: Error finding container 9953626d1d36402053be051c7c30e0f3e768eaf7630d57f4cba557d746e3f5be: Status 404 returned error can't find the container with id 9953626d1d36402053be051c7c30e0f3e768eaf7630d57f4cba557d746e3f5be Sep 29 13:58:59 crc kubenswrapper[4634]: I0929 13:58:59.086624 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-p7lbq" event={"ID":"9b83f594-0978-4c2b-9a19-df52826ee516","Type":"ContainerStarted","Data":"9953626d1d36402053be051c7c30e0f3e768eaf7630d57f4cba557d746e3f5be"} Sep 29 13:59:00 crc kubenswrapper[4634]: I0929 13:59:00.913232 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-p7lbq"] Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.101492 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-p7lbq" event={"ID":"9b83f594-0978-4c2b-9a19-df52826ee516","Type":"ContainerStarted","Data":"613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed"} Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.101657 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-p7lbq" podUID="9b83f594-0978-4c2b-9a19-df52826ee516" containerName="registry-server" containerID="cri-o://613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed" gracePeriod=2 Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.119259 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-p7lbq" podStartSLOduration=1.795500877 podStartE2EDuration="4.119241238s" podCreationTimestamp="2025-09-29 13:58:57 +0000 UTC" firstStartedPulling="2025-09-29 13:58:58.557265066 +0000 UTC m=+869.125992815" lastFinishedPulling="2025-09-29 13:59:00.881005427 +0000 UTC m=+871.449733176" observedRunningTime="2025-09-29 13:59:01.114579167 +0000 UTC m=+871.683306936" watchObservedRunningTime="2025-09-29 13:59:01.119241238 +0000 UTC m=+871.687969017" Sep 29 13:59:01 crc kubenswrapper[4634]: E0929 13:59:01.217480 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b83f594_0978_4c2b_9a19_df52826ee516.slice/crio-613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b83f594_0978_4c2b_9a19_df52826ee516.slice/crio-conmon-613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed.scope\": RecentStats: unable to find data in memory cache]" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.430887 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-p7lbq" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.530043 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-nzrgv"] Sep 29 13:59:01 crc kubenswrapper[4634]: E0929 13:59:01.530392 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b83f594-0978-4c2b-9a19-df52826ee516" containerName="registry-server" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.530406 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b83f594-0978-4c2b-9a19-df52826ee516" containerName="registry-server" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.530588 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b83f594-0978-4c2b-9a19-df52826ee516" containerName="registry-server" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.531119 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.533158 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nzrgv"] Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.570914 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8fd6\" (UniqueName: \"kubernetes.io/projected/9b83f594-0978-4c2b-9a19-df52826ee516-kube-api-access-q8fd6\") pod \"9b83f594-0978-4c2b-9a19-df52826ee516\" (UID: \"9b83f594-0978-4c2b-9a19-df52826ee516\") " Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.571179 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skrhj\" (UniqueName: \"kubernetes.io/projected/90d0c015-fc7c-4d00-b1a2-83a4e0d68ada-kube-api-access-skrhj\") pod \"openstack-operator-index-nzrgv\" (UID: \"90d0c015-fc7c-4d00-b1a2-83a4e0d68ada\") " pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.577330 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b83f594-0978-4c2b-9a19-df52826ee516-kube-api-access-q8fd6" (OuterVolumeSpecName: "kube-api-access-q8fd6") pod "9b83f594-0978-4c2b-9a19-df52826ee516" (UID: "9b83f594-0978-4c2b-9a19-df52826ee516"). InnerVolumeSpecName "kube-api-access-q8fd6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.672498 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skrhj\" (UniqueName: \"kubernetes.io/projected/90d0c015-fc7c-4d00-b1a2-83a4e0d68ada-kube-api-access-skrhj\") pod \"openstack-operator-index-nzrgv\" (UID: \"90d0c015-fc7c-4d00-b1a2-83a4e0d68ada\") " pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.672591 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8fd6\" (UniqueName: \"kubernetes.io/projected/9b83f594-0978-4c2b-9a19-df52826ee516-kube-api-access-q8fd6\") on node \"crc\" DevicePath \"\"" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.694316 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skrhj\" (UniqueName: \"kubernetes.io/projected/90d0c015-fc7c-4d00-b1a2-83a4e0d68ada-kube-api-access-skrhj\") pod \"openstack-operator-index-nzrgv\" (UID: \"90d0c015-fc7c-4d00-b1a2-83a4e0d68ada\") " pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:01 crc kubenswrapper[4634]: I0929 13:59:01.858950 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.117582 4634 generic.go:334] "Generic (PLEG): container finished" podID="9b83f594-0978-4c2b-9a19-df52826ee516" containerID="613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed" exitCode=0 Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.119417 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-p7lbq" Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.127746 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-p7lbq" event={"ID":"9b83f594-0978-4c2b-9a19-df52826ee516","Type":"ContainerDied","Data":"613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed"} Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.127787 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-p7lbq" event={"ID":"9b83f594-0978-4c2b-9a19-df52826ee516","Type":"ContainerDied","Data":"9953626d1d36402053be051c7c30e0f3e768eaf7630d57f4cba557d746e3f5be"} Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.127809 4634 scope.go:117] "RemoveContainer" containerID="613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed" Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.179251 4634 scope.go:117] "RemoveContainer" containerID="613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed" Sep 29 13:59:02 crc kubenswrapper[4634]: E0929 13:59:02.179853 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed\": container with ID starting with 613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed not found: ID does not exist" containerID="613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed" Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.179879 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed"} err="failed to get container 
status \"613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed\": rpc error: code = NotFound desc = could not find container \"613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed\": container with ID starting with 613161c708e95bc435e492deef004aa4d03e07829e68d95a21c99f44f9e81aed not found: ID does not exist" Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.187881 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-p7lbq"] Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.193266 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-p7lbq"] Sep 29 13:59:02 crc kubenswrapper[4634]: W0929 13:59:02.340053 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90d0c015_fc7c_4d00_b1a2_83a4e0d68ada.slice/crio-63e538bfb19741db075f820fc27a684bfad7f1d019f633a9c843d03014ea2ff2 WatchSource:0}: Error finding container 63e538bfb19741db075f820fc27a684bfad7f1d019f633a9c843d03014ea2ff2: Status 404 returned error can't find the container with id 63e538bfb19741db075f820fc27a684bfad7f1d019f633a9c843d03014ea2ff2 Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.344791 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nzrgv"] Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.630219 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-mgm82" Sep 29 13:59:02 crc kubenswrapper[4634]: I0929 13:59:02.670373 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-7772m" Sep 29 13:59:03 crc kubenswrapper[4634]: I0929 13:59:03.133808 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nzrgv" event={"ID":"90d0c015-fc7c-4d00-b1a2-83a4e0d68ada","Type":"ContainerStarted","Data":"3632846647ca13979025bd1e8d6b2467e4811068f95458929e1cce43f787a207"} Sep 29 13:59:03 crc kubenswrapper[4634]: I0929 13:59:03.133913 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nzrgv" event={"ID":"90d0c015-fc7c-4d00-b1a2-83a4e0d68ada","Type":"ContainerStarted","Data":"63e538bfb19741db075f820fc27a684bfad7f1d019f633a9c843d03014ea2ff2"} Sep 29 13:59:03 crc kubenswrapper[4634]: I0929 13:59:03.163440 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-nzrgv" podStartSLOduration=2.113039422 podStartE2EDuration="2.163354574s" podCreationTimestamp="2025-09-29 13:59:01 +0000 UTC" firstStartedPulling="2025-09-29 13:59:02.343392199 +0000 UTC m=+872.912119988" lastFinishedPulling="2025-09-29 13:59:02.393707391 +0000 UTC m=+872.962435140" observedRunningTime="2025-09-29 13:59:03.162293374 +0000 UTC m=+873.731021193" watchObservedRunningTime="2025-09-29 13:59:03.163354574 +0000 UTC m=+873.732082353" Sep 29 13:59:04 crc kubenswrapper[4634]: I0929 13:59:04.118408 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b83f594-0978-4c2b-9a19-df52826ee516" path="/var/lib/kubelet/pods/9b83f594-0978-4c2b-9a19-df52826ee516/volumes" Sep 29 13:59:11 crc kubenswrapper[4634]: I0929 13:59:11.859527 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:11 crc kubenswrapper[4634]: I0929 
13:59:11.860444 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:11 crc kubenswrapper[4634]: I0929 13:59:11.897488 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:12 crc kubenswrapper[4634]: I0929 13:59:12.228779 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-nzrgv" Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.396300 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.396429 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.777736 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9"] Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.780018 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.784925 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-vbfkc" Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.807233 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9"] Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.959926 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-bundle\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.960015 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-util\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:14 crc kubenswrapper[4634]: I0929 13:59:14.960164 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22wr7\" (UniqueName: \"kubernetes.io/projected/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-kube-api-access-22wr7\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 
13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.061318 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22wr7\" (UniqueName: \"kubernetes.io/projected/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-kube-api-access-22wr7\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.061438 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-util\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.061459 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-bundle\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.062116 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-util\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.062437 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-bundle\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.094885 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22wr7\" (UniqueName: \"kubernetes.io/projected/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-kube-api-access-22wr7\") pod \"58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.120477 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:15 crc kubenswrapper[4634]: I0929 13:59:15.666107 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9"] Sep 29 13:59:16 crc kubenswrapper[4634]: I0929 13:59:16.223694 4634 generic.go:334] "Generic (PLEG): container finished" podID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerID="01a8041892d1163adc4406c03277d636a31666cdbdaf8ae4517fc12a4ef16a15" exitCode=0 Sep 29 13:59:16 crc kubenswrapper[4634]: I0929 13:59:16.223739 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" event={"ID":"bc9a6824-e9e0-4847-96a8-cbc4eccce6de","Type":"ContainerDied","Data":"01a8041892d1163adc4406c03277d636a31666cdbdaf8ae4517fc12a4ef16a15"} Sep 29 13:59:16 crc kubenswrapper[4634]: I0929 13:59:16.223763 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" event={"ID":"bc9a6824-e9e0-4847-96a8-cbc4eccce6de","Type":"ContainerStarted","Data":"f1c832552423b91cbc63c5f4d5f1ba0ddea46de7b3b27319e4bb959182b0fbba"} Sep 29 13:59:17 crc kubenswrapper[4634]: I0929 13:59:17.235554 4634 generic.go:334] "Generic (PLEG): container finished" podID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerID="773b9563065a256891675f19930d35c49dd1bfa9fbb692ed2b8b965dcd131c56" exitCode=0 Sep 29 13:59:17 crc kubenswrapper[4634]: I0929 13:59:17.235682 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" event={"ID":"bc9a6824-e9e0-4847-96a8-cbc4eccce6de","Type":"ContainerDied","Data":"773b9563065a256891675f19930d35c49dd1bfa9fbb692ed2b8b965dcd131c56"} Sep 29 13:59:18 crc kubenswrapper[4634]: I0929 13:59:18.247991 4634 generic.go:334] "Generic (PLEG): container finished" podID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerID="a9b73791f691df4b711245886f8cb67bf8d0c07f802df54eef51d0a11740a287" exitCode=0 Sep 29 13:59:18 crc kubenswrapper[4634]: I0929 13:59:18.248080 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" event={"ID":"bc9a6824-e9e0-4847-96a8-cbc4eccce6de","Type":"ContainerDied","Data":"a9b73791f691df4b711245886f8cb67bf8d0c07f802df54eef51d0a11740a287"} Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.535834 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.643832 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-util\") pod \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.644352 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22wr7\" (UniqueName: \"kubernetes.io/projected/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-kube-api-access-22wr7\") pod \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.644453 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-bundle\") pod \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\" (UID: \"bc9a6824-e9e0-4847-96a8-cbc4eccce6de\") " Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.646336 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-bundle" (OuterVolumeSpecName: "bundle") pod "bc9a6824-e9e0-4847-96a8-cbc4eccce6de" (UID: "bc9a6824-e9e0-4847-96a8-cbc4eccce6de"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.651394 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-kube-api-access-22wr7" (OuterVolumeSpecName: "kube-api-access-22wr7") pod "bc9a6824-e9e0-4847-96a8-cbc4eccce6de" (UID: "bc9a6824-e9e0-4847-96a8-cbc4eccce6de"). InnerVolumeSpecName "kube-api-access-22wr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.660904 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-util" (OuterVolumeSpecName: "util") pod "bc9a6824-e9e0-4847-96a8-cbc4eccce6de" (UID: "bc9a6824-e9e0-4847-96a8-cbc4eccce6de"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.746438 4634 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-util\") on node \"crc\" DevicePath \"\"" Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.746507 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22wr7\" (UniqueName: \"kubernetes.io/projected/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-kube-api-access-22wr7\") on node \"crc\" DevicePath \"\"" Sep 29 13:59:19 crc kubenswrapper[4634]: I0929 13:59:19.746526 4634 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bc9a6824-e9e0-4847-96a8-cbc4eccce6de-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 13:59:20 crc kubenswrapper[4634]: I0929 13:59:20.266988 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" event={"ID":"bc9a6824-e9e0-4847-96a8-cbc4eccce6de","Type":"ContainerDied","Data":"f1c832552423b91cbc63c5f4d5f1ba0ddea46de7b3b27319e4bb959182b0fbba"} Sep 29 13:59:20 crc kubenswrapper[4634]: I0929 13:59:20.267064 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1c832552423b91cbc63c5f4d5f1ba0ddea46de7b3b27319e4bb959182b0fbba" Sep 29 13:59:20 crc kubenswrapper[4634]: I0929 13:59:20.267012 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.385018 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng"] Sep 29 13:59:27 crc kubenswrapper[4634]: E0929 13:59:27.386248 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerName="util" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.386265 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerName="util" Sep 29 13:59:27 crc kubenswrapper[4634]: E0929 13:59:27.386286 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerName="pull" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.386292 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerName="pull" Sep 29 13:59:27 crc kubenswrapper[4634]: E0929 13:59:27.386308 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerName="extract" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.386315 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerName="extract" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.386446 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc9a6824-e9e0-4847-96a8-cbc4eccce6de" containerName="extract" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.387351 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.390248 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-cg66g" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.409544 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng"] Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.465468 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx4lk\" (UniqueName: \"kubernetes.io/projected/aea3000a-d973-4f2b-a521-dd3313901830-kube-api-access-xx4lk\") pod \"openstack-operator-controller-operator-fc7b59957-prtng\" (UID: \"aea3000a-d973-4f2b-a521-dd3313901830\") " pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.566653 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx4lk\" (UniqueName: \"kubernetes.io/projected/aea3000a-d973-4f2b-a521-dd3313901830-kube-api-access-xx4lk\") pod \"openstack-operator-controller-operator-fc7b59957-prtng\" (UID: \"aea3000a-d973-4f2b-a521-dd3313901830\") " pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.594896 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx4lk\" (UniqueName: \"kubernetes.io/projected/aea3000a-d973-4f2b-a521-dd3313901830-kube-api-access-xx4lk\") pod \"openstack-operator-controller-operator-fc7b59957-prtng\" (UID: \"aea3000a-d973-4f2b-a521-dd3313901830\") " pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" Sep 29 13:59:27 crc kubenswrapper[4634]: I0929 13:59:27.708572 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" Sep 29 13:59:28 crc kubenswrapper[4634]: I0929 13:59:28.188400 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng"] Sep 29 13:59:28 crc kubenswrapper[4634]: I0929 13:59:28.335980 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" event={"ID":"aea3000a-d973-4f2b-a521-dd3313901830","Type":"ContainerStarted","Data":"31bfa80c53c2f77eca99f65af651b1f4713eee0bb6e78f76cb4d62352ec12d81"} Sep 29 13:59:33 crc kubenswrapper[4634]: I0929 13:59:33.370607 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" event={"ID":"aea3000a-d973-4f2b-a521-dd3313901830","Type":"ContainerStarted","Data":"96fe16cb1b68de9b6cf1f37bfcdc2af12e724e40eacfa1861fd28dadb4f74663"} Sep 29 13:59:36 crc kubenswrapper[4634]: I0929 13:59:36.391070 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" event={"ID":"aea3000a-d973-4f2b-a521-dd3313901830","Type":"ContainerStarted","Data":"0d35e465723eeedfe8149085947c10a323a5ce3281a304000aedcc42ca55c90c"} Sep 29 13:59:36 crc kubenswrapper[4634]: I0929 13:59:36.391515 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" Sep 29 13:59:36 crc kubenswrapper[4634]: I0929 13:59:36.435724 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" podStartSLOduration=2.034071999 podStartE2EDuration="9.435703257s" podCreationTimestamp="2025-09-29 13:59:27 +0000 UTC" firstStartedPulling="2025-09-29 13:59:28.204026892 +0000 UTC m=+898.772754631" lastFinishedPulling="2025-09-29 13:59:35.60565814 +0000 UTC m=+906.174385889" observedRunningTime="2025-09-29 13:59:36.431630773 +0000 UTC m=+907.000358532" watchObservedRunningTime="2025-09-29 13:59:36.435703257 +0000 UTC m=+907.004431016" Sep 29 13:59:44 crc kubenswrapper[4634]: I0929 13:59:44.395797 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 13:59:44 crc kubenswrapper[4634]: I0929 13:59:44.396529 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 13:59:47 crc kubenswrapper[4634]: I0929 13:59:47.712969 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-fc7b59957-prtng" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.179845 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q"] Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.181661 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.184480 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.184802 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.200821 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q"] Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.325718 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24d68595-200b-4809-9287-0d4c1c332bdd-config-volume\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.325787 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24d68595-200b-4809-9287-0d4c1c332bdd-secret-volume\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.325871 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8z2t\" (UniqueName: \"kubernetes.io/projected/24d68595-200b-4809-9287-0d4c1c332bdd-kube-api-access-b8z2t\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.427419 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8z2t\" (UniqueName: \"kubernetes.io/projected/24d68595-200b-4809-9287-0d4c1c332bdd-kube-api-access-b8z2t\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.427516 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24d68595-200b-4809-9287-0d4c1c332bdd-config-volume\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.427539 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24d68595-200b-4809-9287-0d4c1c332bdd-secret-volume\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.429358 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24d68595-200b-4809-9287-0d4c1c332bdd-config-volume\") pod 
\"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.436571 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24d68595-200b-4809-9287-0d4c1c332bdd-secret-volume\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.450319 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8z2t\" (UniqueName: \"kubernetes.io/projected/24d68595-200b-4809-9287-0d4c1c332bdd-kube-api-access-b8z2t\") pod \"collect-profiles-29319240-4wf6q\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.505752 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:00 crc kubenswrapper[4634]: I0929 14:00:00.784656 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q"] Sep 29 14:00:01 crc kubenswrapper[4634]: I0929 14:00:01.610727 4634 generic.go:334] "Generic (PLEG): container finished" podID="24d68595-200b-4809-9287-0d4c1c332bdd" containerID="9ffc0a9dfcc18bb73ed605b715e1bd06066dae1076dabccddff127b43ef5ecaf" exitCode=0 Sep 29 14:00:01 crc kubenswrapper[4634]: I0929 14:00:01.610816 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" event={"ID":"24d68595-200b-4809-9287-0d4c1c332bdd","Type":"ContainerDied","Data":"9ffc0a9dfcc18bb73ed605b715e1bd06066dae1076dabccddff127b43ef5ecaf"} Sep 29 14:00:01 crc kubenswrapper[4634]: I0929 14:00:01.612416 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" event={"ID":"24d68595-200b-4809-9287-0d4c1c332bdd","Type":"ContainerStarted","Data":"1382bfae1e72d22e2031c1aaca14a5ef61705c0703c7f5ea0ebff2b15fa4c576"} Sep 29 14:00:02 crc kubenswrapper[4634]: I0929 14:00:02.928208 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.068796 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24d68595-200b-4809-9287-0d4c1c332bdd-secret-volume\") pod \"24d68595-200b-4809-9287-0d4c1c332bdd\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.068868 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24d68595-200b-4809-9287-0d4c1c332bdd-config-volume\") pod \"24d68595-200b-4809-9287-0d4c1c332bdd\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.069119 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8z2t\" (UniqueName: \"kubernetes.io/projected/24d68595-200b-4809-9287-0d4c1c332bdd-kube-api-access-b8z2t\") pod \"24d68595-200b-4809-9287-0d4c1c332bdd\" (UID: \"24d68595-200b-4809-9287-0d4c1c332bdd\") " Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.069909 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24d68595-200b-4809-9287-0d4c1c332bdd-config-volume" (OuterVolumeSpecName: "config-volume") pod "24d68595-200b-4809-9287-0d4c1c332bdd" (UID: "24d68595-200b-4809-9287-0d4c1c332bdd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.074700 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d68595-200b-4809-9287-0d4c1c332bdd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "24d68595-200b-4809-9287-0d4c1c332bdd" (UID: "24d68595-200b-4809-9287-0d4c1c332bdd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.075397 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24d68595-200b-4809-9287-0d4c1c332bdd-kube-api-access-b8z2t" (OuterVolumeSpecName: "kube-api-access-b8z2t") pod "24d68595-200b-4809-9287-0d4c1c332bdd" (UID: "24d68595-200b-4809-9287-0d4c1c332bdd"). InnerVolumeSpecName "kube-api-access-b8z2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.170462 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8z2t\" (UniqueName: \"kubernetes.io/projected/24d68595-200b-4809-9287-0d4c1c332bdd-kube-api-access-b8z2t\") on node \"crc\" DevicePath \"\"" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.170506 4634 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/24d68595-200b-4809-9287-0d4c1c332bdd-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.170520 4634 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/24d68595-200b-4809-9287-0d4c1c332bdd-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.625803 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.625797 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q" event={"ID":"24d68595-200b-4809-9287-0d4c1c332bdd","Type":"ContainerDied","Data":"1382bfae1e72d22e2031c1aaca14a5ef61705c0703c7f5ea0ebff2b15fa4c576"} Sep 29 14:00:03 crc kubenswrapper[4634]: I0929 14:00:03.625877 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1382bfae1e72d22e2031c1aaca14a5ef61705c0703c7f5ea0ebff2b15fa4c576" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.101679 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf"] Sep 29 14:00:05 crc kubenswrapper[4634]: E0929 14:00:05.102129 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24d68595-200b-4809-9287-0d4c1c332bdd" containerName="collect-profiles" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.102141 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="24d68595-200b-4809-9287-0d4c1c332bdd" containerName="collect-profiles" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.102244 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="24d68595-200b-4809-9287-0d4c1c332bdd" containerName="collect-profiles" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.102836 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.105492 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-pbfhv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.122631 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.132915 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.137811 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.142344 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-zcht8" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.158682 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.175128 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.176553 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.180264 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-kdpqn" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.187505 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.190053 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.192789 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-676nk" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.209634 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq9x7\" (UniqueName: \"kubernetes.io/projected/7fb4797f-f58b-425a-a987-4559c9d5d481-kube-api-access-zq9x7\") pod \"barbican-operator-controller-manager-6495d75b5-2nbgf\" (UID: \"7fb4797f-f58b-425a-a987-4559c9d5d481\") " pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.232427 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.233551 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.238449 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-4rwf8" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.264018 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.286431 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.300627 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.310864 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4ff8\" (UniqueName: \"kubernetes.io/projected/d82e90ad-ac20-415a-9b7e-168e6472f2a8-kube-api-access-v4ff8\") pod \"cinder-operator-controller-manager-748c574d75-h2wqv\" (UID: \"d82e90ad-ac20-415a-9b7e-168e6472f2a8\") " pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.311181 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq9x7\" (UniqueName: \"kubernetes.io/projected/7fb4797f-f58b-425a-a987-4559c9d5d481-kube-api-access-zq9x7\") pod \"barbican-operator-controller-manager-6495d75b5-2nbgf\" (UID: \"7fb4797f-f58b-425a-a987-4559c9d5d481\") " pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" Sep 29 
14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.311307 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6cm6\" (UniqueName: \"kubernetes.io/projected/fc9290c5-62eb-4b93-8b0f-032c2474510f-kube-api-access-p6cm6\") pod \"designate-operator-controller-manager-7d74f4d695-w2nj4\" (UID: \"fc9290c5-62eb-4b93-8b0f-032c2474510f\") " pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.311458 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpzcn\" (UniqueName: \"kubernetes.io/projected/12b1701c-523e-428c-817b-f0ae4914b9fb-kube-api-access-tpzcn\") pod \"glance-operator-controller-manager-67b5d44b7f-fzzjz\" (UID: \"12b1701c-523e-428c-817b-f0ae4914b9fb\") " pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.313724 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.314749 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.317421 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-gk9bs" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.327245 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.328341 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.332464 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.332810 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-5l997" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.347174 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.377955 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.378104 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq9x7\" (UniqueName: \"kubernetes.io/projected/7fb4797f-f58b-425a-a987-4559c9d5d481-kube-api-access-zq9x7\") pod \"barbican-operator-controller-manager-6495d75b5-2nbgf\" (UID: \"7fb4797f-f58b-425a-a987-4559c9d5d481\") " pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.382226 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.383427 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.387654 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-rczxr" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.395253 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.396223 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.408748 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-m89hz" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.412821 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4ff8\" (UniqueName: \"kubernetes.io/projected/d82e90ad-ac20-415a-9b7e-168e6472f2a8-kube-api-access-v4ff8\") pod \"cinder-operator-controller-manager-748c574d75-h2wqv\" (UID: \"d82e90ad-ac20-415a-9b7e-168e6472f2a8\") " pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.412882 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.412916 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22tlm\" (UniqueName: \"kubernetes.io/projected/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-kube-api-access-22tlm\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.412937 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6cm6\" (UniqueName: \"kubernetes.io/projected/fc9290c5-62eb-4b93-8b0f-032c2474510f-kube-api-access-p6cm6\") pod \"designate-operator-controller-manager-7d74f4d695-w2nj4\" (UID: \"fc9290c5-62eb-4b93-8b0f-032c2474510f\") " pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.412965 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z9f7\" (UniqueName: \"kubernetes.io/projected/8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15-kube-api-access-8z9f7\") pod \"heat-operator-controller-manager-8ff95898-b7s9w\" (UID: \"8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15\") " pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.413009 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpzcn\" (UniqueName: \"kubernetes.io/projected/12b1701c-523e-428c-817b-f0ae4914b9fb-kube-api-access-tpzcn\") pod 
\"glance-operator-controller-manager-67b5d44b7f-fzzjz\" (UID: \"12b1701c-523e-428c-817b-f0ae4914b9fb\") " pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.413048 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6q6b\" (UniqueName: \"kubernetes.io/projected/ba50e2d0-3018-4591-81fd-9e31c5d39951-kube-api-access-x6q6b\") pod \"horizon-operator-controller-manager-695847bc78-g6ncd\" (UID: \"ba50e2d0-3018-4591-81fd-9e31c5d39951\") " pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.413589 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.423264 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.431479 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.431624 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.432776 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.439010 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-2zl49" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.441994 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpzcn\" (UniqueName: \"kubernetes.io/projected/12b1701c-523e-428c-817b-f0ae4914b9fb-kube-api-access-tpzcn\") pod \"glance-operator-controller-manager-67b5d44b7f-fzzjz\" (UID: \"12b1701c-523e-428c-817b-f0ae4914b9fb\") " pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.443340 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.445448 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.457068 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-vtw6l" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.458402 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4ff8\" (UniqueName: \"kubernetes.io/projected/d82e90ad-ac20-415a-9b7e-168e6472f2a8-kube-api-access-v4ff8\") pod \"cinder-operator-controller-manager-748c574d75-h2wqv\" (UID: \"d82e90ad-ac20-415a-9b7e-168e6472f2a8\") " pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.479345 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.482169 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.482750 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6cm6\" (UniqueName: \"kubernetes.io/projected/fc9290c5-62eb-4b93-8b0f-032c2474510f-kube-api-access-p6cm6\") pod \"designate-operator-controller-manager-7d74f4d695-w2nj4\" (UID: \"fc9290c5-62eb-4b93-8b0f-032c2474510f\") " pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.498411 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.499442 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.502380 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-zw76x" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.510418 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.514825 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22tlm\" (UniqueName: \"kubernetes.io/projected/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-kube-api-access-22tlm\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.514877 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z9f7\" (UniqueName: \"kubernetes.io/projected/8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15-kube-api-access-8z9f7\") pod \"heat-operator-controller-manager-8ff95898-b7s9w\" (UID: \"8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15\") " pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.514918 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7bfj\" (UniqueName: \"kubernetes.io/projected/3fce3aee-b45a-4d80-a2e5-529632ed8a2d-kube-api-access-x7bfj\") pod \"keystone-operator-controller-manager-7bf498966c-9l547\" (UID: \"3fce3aee-b45a-4d80-a2e5-529632ed8a2d\") " pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.514949 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj62f\" (UniqueName: \"kubernetes.io/projected/c00a2f33-36be-4039-a5ae-73df39f84d1d-kube-api-access-gj62f\") pod \"ironic-operator-controller-manager-9fc8d5567-c68h7\" (UID: \"c00a2f33-36be-4039-a5ae-73df39f84d1d\") " pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.514981 4634 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6q6b\" (UniqueName: \"kubernetes.io/projected/ba50e2d0-3018-4591-81fd-9e31c5d39951-kube-api-access-x6q6b\") pod \"horizon-operator-controller-manager-695847bc78-g6ncd\" (UID: \"ba50e2d0-3018-4591-81fd-9e31c5d39951\") " pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.515004 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:05 crc kubenswrapper[4634]: E0929 14:00:05.515132 4634 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 14:00:05 crc kubenswrapper[4634]: E0929 14:00:05.515180 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert podName:e220a6dd-ab23-4eeb-9cb7-8496c72cc19f nodeName:}" failed. No retries permitted until 2025-09-29 14:00:06.015161907 +0000 UTC m=+936.583889656 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert") pod "infra-operator-controller-manager-858cd69f49-7v24f" (UID: "e220a6dd-ab23-4eeb-9cb7-8496c72cc19f") : secret "infra-operator-webhook-server-cert" not found Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.535725 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.540392 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.541145 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.570174 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.572358 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.588052 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.590399 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z9f7\" (UniqueName: \"kubernetes.io/projected/8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15-kube-api-access-8z9f7\") pod \"heat-operator-controller-manager-8ff95898-b7s9w\" (UID: \"8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15\") " pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.591345 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.599371 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22tlm\" (UniqueName: \"kubernetes.io/projected/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-kube-api-access-22tlm\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.599729 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-hdjdc" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.599938 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-vw4d6" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.610121 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6q6b\" (UniqueName: \"kubernetes.io/projected/ba50e2d0-3018-4591-81fd-9e31c5d39951-kube-api-access-x6q6b\") pod \"horizon-operator-controller-manager-695847bc78-g6ncd\" (UID: \"ba50e2d0-3018-4591-81fd-9e31c5d39951\") " pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.649262 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmnlq\" (UniqueName: \"kubernetes.io/projected/0b0b3b6f-0579-4a42-bad2-ecbda8906426-kube-api-access-qmnlq\") pod \"mariadb-operator-controller-manager-687b9cf756-hd2mv\" (UID: \"0b0b3b6f-0579-4a42-bad2-ecbda8906426\") " pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.649306 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd4mc\" (UniqueName: \"kubernetes.io/projected/be9fbcb2-15d0-4fc2-b745-41178d406fca-kube-api-access-sd4mc\") pod \"neutron-operator-controller-manager-54d766c9f9-k2dqf\" (UID: \"be9fbcb2-15d0-4fc2-b745-41178d406fca\") " pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.649357 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8dbr\" (UniqueName: \"kubernetes.io/projected/ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df-kube-api-access-j8dbr\") pod \"manila-operator-controller-manager-56cf9c6b99-bd4mn\" (UID: \"ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df\") " pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.649383 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7bfj\" (UniqueName: \"kubernetes.io/projected/3fce3aee-b45a-4d80-a2e5-529632ed8a2d-kube-api-access-x7bfj\") pod \"keystone-operator-controller-manager-7bf498966c-9l547\" (UID: \"3fce3aee-b45a-4d80-a2e5-529632ed8a2d\") " pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.649419 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj62f\" (UniqueName: 
\"kubernetes.io/projected/c00a2f33-36be-4039-a5ae-73df39f84d1d-kube-api-access-gj62f\") pod \"ironic-operator-controller-manager-9fc8d5567-c68h7\" (UID: \"c00a2f33-36be-4039-a5ae-73df39f84d1d\") " pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.729097 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.730815 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj62f\" (UniqueName: \"kubernetes.io/projected/c00a2f33-36be-4039-a5ae-73df39f84d1d-kube-api-access-gj62f\") pod \"ironic-operator-controller-manager-9fc8d5567-c68h7\" (UID: \"c00a2f33-36be-4039-a5ae-73df39f84d1d\") " pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.731765 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.750577 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.751629 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwr6n\" (UniqueName: \"kubernetes.io/projected/65cef236-09ce-4623-9cd8-9d4c0e1f8346-kube-api-access-fwr6n\") pod \"octavia-operator-controller-manager-76fcc6dc7c-7c4mq\" (UID: \"65cef236-09ce-4623-9cd8-9d4c0e1f8346\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.751698 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmnlq\" (UniqueName: \"kubernetes.io/projected/0b0b3b6f-0579-4a42-bad2-ecbda8906426-kube-api-access-qmnlq\") pod \"mariadb-operator-controller-manager-687b9cf756-hd2mv\" (UID: \"0b0b3b6f-0579-4a42-bad2-ecbda8906426\") " pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.751730 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd4mc\" (UniqueName: \"kubernetes.io/projected/be9fbcb2-15d0-4fc2-b745-41178d406fca-kube-api-access-sd4mc\") pod \"neutron-operator-controller-manager-54d766c9f9-k2dqf\" (UID: \"be9fbcb2-15d0-4fc2-b745-41178d406fca\") " pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.751789 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8dbr\" (UniqueName: \"kubernetes.io/projected/ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df-kube-api-access-j8dbr\") pod \"manila-operator-controller-manager-56cf9c6b99-bd4mn\" (UID: \"ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df\") " pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.751837 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6c4mq\" (UniqueName: \"kubernetes.io/projected/11b77d0f-14f2-47d2-839a-6e06505787a2-kube-api-access-6c4mq\") pod \"nova-operator-controller-manager-c7c776c96-9z55w\" (UID: 
\"11b77d0f-14f2-47d2-839a-6e06505787a2\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.768568 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.769667 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.777251 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-mrmxb" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.777538 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.790761 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.791828 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.792866 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd4mc\" (UniqueName: \"kubernetes.io/projected/be9fbcb2-15d0-4fc2-b745-41178d406fca-kube-api-access-sd4mc\") pod \"neutron-operator-controller-manager-54d766c9f9-k2dqf\" (UID: \"be9fbcb2-15d0-4fc2-b745-41178d406fca\") " pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.799343 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-fp988" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.806271 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7bfj\" (UniqueName: \"kubernetes.io/projected/3fce3aee-b45a-4d80-a2e5-529632ed8a2d-kube-api-access-x7bfj\") pod \"keystone-operator-controller-manager-7bf498966c-9l547\" (UID: \"3fce3aee-b45a-4d80-a2e5-529632ed8a2d\") " pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.812368 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmnlq\" (UniqueName: \"kubernetes.io/projected/0b0b3b6f-0579-4a42-bad2-ecbda8906426-kube-api-access-qmnlq\") pod \"mariadb-operator-controller-manager-687b9cf756-hd2mv\" (UID: \"0b0b3b6f-0579-4a42-bad2-ecbda8906426\") " pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.822721 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8dbr\" (UniqueName: \"kubernetes.io/projected/ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df-kube-api-access-j8dbr\") pod \"manila-operator-controller-manager-56cf9c6b99-bd4mn\" (UID: \"ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df\") " pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.849813 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.853328 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.853398 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6c4mq\" (UniqueName: \"kubernetes.io/projected/11b77d0f-14f2-47d2-839a-6e06505787a2-kube-api-access-6c4mq\") pod \"nova-operator-controller-manager-c7c776c96-9z55w\" (UID: \"11b77d0f-14f2-47d2-839a-6e06505787a2\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.853426 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwr6n\" (UniqueName: \"kubernetes.io/projected/65cef236-09ce-4623-9cd8-9d4c0e1f8346-kube-api-access-fwr6n\") pod \"octavia-operator-controller-manager-76fcc6dc7c-7c4mq\" (UID: \"65cef236-09ce-4623-9cd8-9d4c0e1f8346\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.853481 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkh4x\" (UniqueName: \"kubernetes.io/projected/0808341c-4037-4360-bc34-dce11a7e8088-kube-api-access-zkh4x\") pod \"ovn-operator-controller-manager-5f95c46c78-2vb6t\" (UID: \"0808341c-4037-4360-bc34-dce11a7e8088\") " pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.853508 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48k4h\" (UniqueName: \"kubernetes.io/projected/e6c834dc-3418-4d52-ade3-02c1043d6360-kube-api-access-48k4h\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.865886 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.866728 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.871352 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.917791 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-774b97b48-66mbm"] Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.932431 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.939240 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6c4mq\" (UniqueName: \"kubernetes.io/projected/11b77d0f-14f2-47d2-839a-6e06505787a2-kube-api-access-6c4mq\") pod \"nova-operator-controller-manager-c7c776c96-9z55w\" (UID: \"11b77d0f-14f2-47d2-839a-6e06505787a2\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.939439 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-tzbc6" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.951667 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.951826 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwr6n\" (UniqueName: \"kubernetes.io/projected/65cef236-09ce-4623-9cd8-9d4c0e1f8346-kube-api-access-fwr6n\") pod \"octavia-operator-controller-manager-76fcc6dc7c-7c4mq\" (UID: \"65cef236-09ce-4623-9cd8-9d4c0e1f8346\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.954651 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdxjq\" (UniqueName: \"kubernetes.io/projected/e777128b-ae24-469f-81bb-adf78608f20e-kube-api-access-qdxjq\") pod \"placement-operator-controller-manager-774b97b48-66mbm\" (UID: \"e777128b-ae24-469f-81bb-adf78608f20e\") " pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.954695 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkh4x\" (UniqueName: \"kubernetes.io/projected/0808341c-4037-4360-bc34-dce11a7e8088-kube-api-access-zkh4x\") pod \"ovn-operator-controller-manager-5f95c46c78-2vb6t\" (UID: \"0808341c-4037-4360-bc34-dce11a7e8088\") " pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.954724 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48k4h\" (UniqueName: \"kubernetes.io/projected/e6c834dc-3418-4d52-ade3-02c1043d6360-kube-api-access-48k4h\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:05 crc kubenswrapper[4634]: I0929 14:00:05.954752 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:05 crc kubenswrapper[4634]: E0929 14:00:05.954905 4634 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 14:00:05 crc 
kubenswrapper[4634]: E0929 14:00:05.954968 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert podName:e6c834dc-3418-4d52-ade3-02c1043d6360 nodeName:}" failed. No retries permitted until 2025-09-29 14:00:06.454947548 +0000 UTC m=+937.023675297 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-rq5pg" (UID: "e6c834dc-3418-4d52-ade3-02c1043d6360") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.001660 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkh4x\" (UniqueName: \"kubernetes.io/projected/0808341c-4037-4360-bc34-dce11a7e8088-kube-api-access-zkh4x\") pod \"ovn-operator-controller-manager-5f95c46c78-2vb6t\" (UID: \"0808341c-4037-4360-bc34-dce11a7e8088\") " pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.005551 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.008722 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.010493 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.011125 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.016156 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-txd2v" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.023828 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48k4h\" (UniqueName: \"kubernetes.io/projected/e6c834dc-3418-4d52-ade3-02c1043d6360-kube-api-access-48k4h\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.030460 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.035160 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.049151 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.050841 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.050987 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.058882 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.059511 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-q95px" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.060234 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8h4d\" (UniqueName: \"kubernetes.io/projected/b108e534-0a60-4d24-a6b3-9b967045469a-kube-api-access-p8h4d\") pod \"swift-operator-controller-manager-bc7dc7bd9-6gkh8\" (UID: \"b108e534-0a60-4d24-a6b3-9b967045469a\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.060275 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.060313 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdxjq\" (UniqueName: \"kubernetes.io/projected/e777128b-ae24-469f-81bb-adf78608f20e-kube-api-access-qdxjq\") pod \"placement-operator-controller-manager-774b97b48-66mbm\" (UID: \"e777128b-ae24-469f-81bb-adf78608f20e\") " pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" Sep 29 14:00:06 crc kubenswrapper[4634]: E0929 14:00:06.060786 4634 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 14:00:06 crc kubenswrapper[4634]: E0929 14:00:06.060831 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert podName:e220a6dd-ab23-4eeb-9cb7-8496c72cc19f nodeName:}" failed. No retries permitted until 2025-09-29 14:00:07.060812962 +0000 UTC m=+937.629540711 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert") pod "infra-operator-controller-manager-858cd69f49-7v24f" (UID: "e220a6dd-ab23-4eeb-9cb7-8496c72cc19f") : secret "infra-operator-webhook-server-cert" not found Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.086827 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-774b97b48-66mbm"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.093742 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdxjq\" (UniqueName: \"kubernetes.io/projected/e777128b-ae24-469f-81bb-adf78608f20e-kube-api-access-qdxjq\") pod \"placement-operator-controller-manager-774b97b48-66mbm\" (UID: \"e777128b-ae24-469f-81bb-adf78608f20e\") " pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.100920 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.110279 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-knb5c"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.116608 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.128352 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-5dj9c" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.134774 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.168547 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8h4d\" (UniqueName: \"kubernetes.io/projected/b108e534-0a60-4d24-a6b3-9b967045469a-kube-api-access-p8h4d\") pod \"swift-operator-controller-manager-bc7dc7bd9-6gkh8\" (UID: \"b108e534-0a60-4d24-a6b3-9b967045469a\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.168750 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4pr5\" (UniqueName: \"kubernetes.io/projected/6cb280b6-d86c-42cb-8887-819b38c304b8-kube-api-access-s4pr5\") pod \"test-operator-controller-manager-f66b554c6-knb5c\" (UID: \"6cb280b6-d86c-42cb-8887-819b38c304b8\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.168794 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxrmc\" (UniqueName: \"kubernetes.io/projected/97de340e-634f-47e2-8a37-800f2261e43b-kube-api-access-lxrmc\") pod \"telemetry-operator-controller-manager-5bf96cfbc4-2dp75\" (UID: \"97de340e-634f-47e2-8a37-800f2261e43b\") " pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.194796 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.197556 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8h4d\" (UniqueName: \"kubernetes.io/projected/b108e534-0a60-4d24-a6b3-9b967045469a-kube-api-access-p8h4d\") pod \"swift-operator-controller-manager-bc7dc7bd9-6gkh8\" (UID: \"b108e534-0a60-4d24-a6b3-9b967045469a\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.215384 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.216568 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-knb5c"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.216650 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.216772 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.226162 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-7tj44" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.274737 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4pr5\" (UniqueName: \"kubernetes.io/projected/6cb280b6-d86c-42cb-8887-819b38c304b8-kube-api-access-s4pr5\") pod \"test-operator-controller-manager-f66b554c6-knb5c\" (UID: \"6cb280b6-d86c-42cb-8887-819b38c304b8\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.274811 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxrmc\" (UniqueName: \"kubernetes.io/projected/97de340e-634f-47e2-8a37-800f2261e43b-kube-api-access-lxrmc\") pod \"telemetry-operator-controller-manager-5bf96cfbc4-2dp75\" (UID: \"97de340e-634f-47e2-8a37-800f2261e43b\") " pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.274955 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z7kr\" (UniqueName: \"kubernetes.io/projected/45b61a8e-44b5-4cca-85b6-344738b51f52-kube-api-access-9z7kr\") pod \"watcher-operator-controller-manager-76669f99c-7w4vx\" (UID: \"45b61a8e-44b5-4cca-85b6-344738b51f52\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.297523 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.298535 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.305797 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.305842 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-wvjnm" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.326038 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxrmc\" (UniqueName: \"kubernetes.io/projected/97de340e-634f-47e2-8a37-800f2261e43b-kube-api-access-lxrmc\") pod \"telemetry-operator-controller-manager-5bf96cfbc4-2dp75\" (UID: \"97de340e-634f-47e2-8a37-800f2261e43b\") " pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.331078 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4pr5\" (UniqueName: \"kubernetes.io/projected/6cb280b6-d86c-42cb-8887-819b38c304b8-kube-api-access-s4pr5\") pod \"test-operator-controller-manager-f66b554c6-knb5c\" (UID: \"6cb280b6-d86c-42cb-8887-819b38c304b8\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.353489 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.375727 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfjsz\" (UniqueName: \"kubernetes.io/projected/63fb32c1-31c4-4ab0-b10e-c467e2c74410-kube-api-access-dfjsz\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.375793 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z7kr\" (UniqueName: \"kubernetes.io/projected/45b61a8e-44b5-4cca-85b6-344738b51f52-kube-api-access-9z7kr\") pod \"watcher-operator-controller-manager-76669f99c-7w4vx\" (UID: \"45b61a8e-44b5-4cca-85b6-344738b51f52\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.375838 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.377264 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x"] Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.442675 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.446792 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-c5xzc" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.470076 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z7kr\" (UniqueName: \"kubernetes.io/projected/45b61a8e-44b5-4cca-85b6-344738b51f52-kube-api-access-9z7kr\") pod \"watcher-operator-controller-manager-76669f99c-7w4vx\" (UID: \"45b61a8e-44b5-4cca-85b6-344738b51f52\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.494990 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw8fh\" (UniqueName: \"kubernetes.io/projected/c5c15e4b-b806-4d39-915f-c6e60e6d72ea-kube-api-access-hw8fh\") pod \"rabbitmq-cluster-operator-manager-79d8469568-cvk6x\" (UID: \"c5c15e4b-b806-4d39-915f-c6e60e6d72ea\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.495131 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfjsz\" (UniqueName: \"kubernetes.io/projected/63fb32c1-31c4-4ab0-b10e-c467e2c74410-kube-api-access-dfjsz\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.499214 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.504465 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.504508 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" Sep 29 14:00:06 crc kubenswrapper[4634]: E0929 14:00:06.504657 4634 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 14:00:06 crc kubenswrapper[4634]: E0929 14:00:06.504706 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert podName:63fb32c1-31c4-4ab0-b10e-c467e2c74410 nodeName:}" failed. No retries permitted until 2025-09-29 14:00:07.004687463 +0000 UTC m=+937.573415212 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert") pod "openstack-operator-controller-manager-667746d855-fd8px" (UID: "63fb32c1-31c4-4ab0-b10e-c467e2c74410") : secret "webhook-server-cert" not found
Sep 29 14:00:06 crc kubenswrapper[4634]: E0929 14:00:06.504893 4634 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Sep 29 14:00:06 crc kubenswrapper[4634]: E0929 14:00:06.508490 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert podName:e6c834dc-3418-4d52-ade3-02c1043d6360 nodeName:}" failed. No retries permitted until 2025-09-29 14:00:07.508453836 +0000 UTC m=+938.077181585 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-rq5pg" (UID: "e6c834dc-3418-4d52-ade3-02c1043d6360") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.527913 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75"
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.540545 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c"
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.576348 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfjsz\" (UniqueName: \"kubernetes.io/projected/63fb32c1-31c4-4ab0-b10e-c467e2c74410-kube-api-access-dfjsz\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.576441 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x"]
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.577526 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx"
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.607886 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw8fh\" (UniqueName: \"kubernetes.io/projected/c5c15e4b-b806-4d39-915f-c6e60e6d72ea-kube-api-access-hw8fh\") pod \"rabbitmq-cluster-operator-manager-79d8469568-cvk6x\" (UID: \"c5c15e4b-b806-4d39-915f-c6e60e6d72ea\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x"
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.657860 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw8fh\" (UniqueName: \"kubernetes.io/projected/c5c15e4b-b806-4d39-915f-c6e60e6d72ea-kube-api-access-hw8fh\") pod \"rabbitmq-cluster-operator-manager-79d8469568-cvk6x\" (UID: \"c5c15e4b-b806-4d39-915f-c6e60e6d72ea\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x"
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.792003 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf"]
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.893552 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x"
Sep 29 14:00:06 crc kubenswrapper[4634]: I0929 14:00:06.957949 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv"]
Sep 29 14:00:06 crc kubenswrapper[4634]: W0929 14:00:06.985360 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fb4797f_f58b_425a_a987_4559c9d5d481.slice/crio-c8cb5f2751a9090719b260d941dd06e92f7767f06811eaa3418829cc9107491b WatchSource:0}: Error finding container c8cb5f2751a9090719b260d941dd06e92f7767f06811eaa3418829cc9107491b: Status 404 returned error can't find the container with id c8cb5f2751a9090719b260d941dd06e92f7767f06811eaa3418829cc9107491b
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.038942 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"
Sep 29 14:00:07 crc kubenswrapper[4634]: E0929 14:00:07.039126 4634 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Sep 29 14:00:07 crc kubenswrapper[4634]: E0929 14:00:07.039371 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert podName:63fb32c1-31c4-4ab0-b10e-c467e2c74410 nodeName:}" failed. No retries permitted until 2025-09-29 14:00:08.039357528 +0000 UTC m=+938.608085277 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert") pod "openstack-operator-controller-manager-667746d855-fd8px" (UID: "63fb32c1-31c4-4ab0-b10e-c467e2c74410") : secret "webhook-server-cert" not found
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.047382 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.140174 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f"
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.148762 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e220a6dd-ab23-4eeb-9cb7-8496c72cc19f-cert\") pod \"infra-operator-controller-manager-858cd69f49-7v24f\" (UID: \"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f\") " pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f"
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.154855 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f"
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.322344 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.336286 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4"]
Sep 29 14:00:07 crc kubenswrapper[4634]: W0929 14:00:07.361679 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc9290c5_62eb_4b93_8b0f_032c2474510f.slice/crio-93a58d7361d69a8d83f3e4568f61ed800cba879f0f4012dae83e9a1363d44f41 WatchSource:0}: Error finding container 93a58d7361d69a8d83f3e4568f61ed800cba879f0f4012dae83e9a1363d44f41: Status 404 returned error can't find the container with id 93a58d7361d69a8d83f3e4568f61ed800cba879f0f4012dae83e9a1363d44f41
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.415342 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.425052 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.513767 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.564754 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg"
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.583755 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e6c834dc-3418-4d52-ade3-02c1043d6360-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-rq5pg\" (UID: \"e6c834dc-3418-4d52-ade3-02c1043d6360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg"
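[editor's note] The cert mount failures above all follow the same kubelet pattern: on each sync the secret volume plugin looks the Secret up in the API server, fails with secret "..." not found, and nestedpendingoperations schedules the next attempt after durationBeforeRetry (1s here); the mount succeeds as soon as the operator bundle's certificate machinery creates the Secret, as the later "MountVolume.SetUp succeeded" entries show. A minimal client-go sketch of that lookup loop; the namespace and Secret name are taken from the log, everything else (in-cluster credentials, the polling wrapper) is an illustrative assumption:

// watch-for-secret.go: poll for the webhook Secret the kubelet is waiting on.
// Sketch only; the kubelet performs an equivalent Get before it can
// materialize the "cert" volume for the operator pod.
package main

import (
	"context"
	"fmt"
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes the checker runs in-cluster
	if err != nil {
		panic(err)
	}
	cs := kubernetes.NewForConfigOrDie(cfg)
	for {
		s, err := cs.CoreV1().Secrets("openstack-operators").
			Get(context.TODO(), "webhook-server-cert", metav1.GetOptions{})
		if err == nil {
			fmt.Printf("secret present with %d keys\n", len(s.Data))
			return
		}
		fmt.Println("still missing:", err) // mirrors: secret "webhook-server-cert" not found
		time.Sleep(time.Second)            // matches the 1s durationBeforeRetry above
	}
}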
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.668573 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg"
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.727150 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.727574 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t"]
Sep 29 14:00:07 crc kubenswrapper[4634]: W0929 14:00:07.734327 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc00a2f33_36be_4039_a5ae_73df39f84d1d.slice/crio-9b4734b18fddff4b0e1d165c2e269450bad8046748538dd85b71721768d8d7a4 WatchSource:0}: Error finding container 9b4734b18fddff4b0e1d165c2e269450bad8046748538dd85b71721768d8d7a4: Status 404 returned error can't find the container with id 9b4734b18fddff4b0e1d165c2e269450bad8046748538dd85b71721768d8d7a4
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.756324 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.760473 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-774b97b48-66mbm"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.800535 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" event={"ID":"0b0b3b6f-0579-4a42-bad2-ecbda8906426","Type":"ContainerStarted","Data":"43d0b778e6ebcb860a8eb26078610df9132206f53fbcb64c1146399b394b4b60"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.803630 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" event={"ID":"fc9290c5-62eb-4b93-8b0f-032c2474510f","Type":"ContainerStarted","Data":"93a58d7361d69a8d83f3e4568f61ed800cba879f0f4012dae83e9a1363d44f41"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.834542 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" event={"ID":"11b77d0f-14f2-47d2-839a-6e06505787a2","Type":"ContainerStarted","Data":"173b2f1836518fdcf1459b149bd28aac5c47af7c5d8c7457f4a02049174aa8a7"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.840863 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" event={"ID":"be9fbcb2-15d0-4fc2-b745-41178d406fca","Type":"ContainerStarted","Data":"7477979dae5c08f7b22cfd6639e268c4b2a07a4bdb2fca528c4d9a2872550b0d"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.841593 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" event={"ID":"8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15","Type":"ContainerStarted","Data":"57b4430895dff7840b0657934cea3af29fbbf21ac3c09a649c0f6deec4a50ee1"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.842278 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" event={"ID":"0808341c-4037-4360-bc34-dce11a7e8088","Type":"ContainerStarted","Data":"48cc189e1824a6bada9aadf2269190f7765e5fbaa95befb7bde73b7d1ae8f772"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.846819 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" event={"ID":"e777128b-ae24-469f-81bb-adf78608f20e","Type":"ContainerStarted","Data":"f552eb7c441d5791e101bafe4ea3afd76a2f9702886e7d9bd1bd08de9f683905"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.847547 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" event={"ID":"12b1701c-523e-428c-817b-f0ae4914b9fb","Type":"ContainerStarted","Data":"25127aa371355a38b4bd36e9bc78b4ce8500d54acaad4e939495d0910365a5c5"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.848287 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" event={"ID":"d82e90ad-ac20-415a-9b7e-168e6472f2a8","Type":"ContainerStarted","Data":"2fc94dc8484c3867548fa3a9b13c541270eebe89c040995143898da9ed57c623"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.849003 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" event={"ID":"ba50e2d0-3018-4591-81fd-9e31c5d39951","Type":"ContainerStarted","Data":"11bea21b58fce1f6863a1fa3cf0fcbc1b47a0b26f2bb0f0672de86f7c70ea400"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.867894 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" event={"ID":"c00a2f33-36be-4039-a5ae-73df39f84d1d","Type":"ContainerStarted","Data":"9b4734b18fddff4b0e1d165c2e269450bad8046748538dd85b71721768d8d7a4"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.880551 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" event={"ID":"7fb4797f-f58b-425a-a987-4559c9d5d481","Type":"ContainerStarted","Data":"c8cb5f2751a9090719b260d941dd06e92f7767f06811eaa3418829cc9107491b"}
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.920945 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.931434 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.965249 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8"]
Sep 29 14:00:07 crc kubenswrapper[4634]: I0929 14:00:07.993337 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547"]
Sep 29 14:00:07 crc kubenswrapper[4634]: E0929 14:00:07.997411 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p8h4d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-6gkh8_openstack-operators(b108e534-0a60-4d24-a6b3-9b967045469a): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 14:00:08 crc kubenswrapper[4634]: W0929 14:00:08.010409 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fce3aee_b45a_4d80_a2e5_529632ed8a2d.slice/crio-494fff4d3d2850397ea6bdf9a956e25385a3c73ab24d301a4920ae94b526f152 WatchSource:0}: Error finding container 494fff4d3d2850397ea6bdf9a956e25385a3c73ab24d301a4920ae94b526f152: Status 404 returned error can't find the container with id 494fff4d3d2850397ea6bdf9a956e25385a3c73ab24d301a4920ae94b526f152
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.097744 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.109062 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/63fb32c1-31c4-4ab0-b10e-c467e2c74410-cert\") pod \"openstack-operator-controller-manager-667746d855-fd8px\" (UID: \"63fb32c1-31c4-4ab0-b10e-c467e2c74410\") " pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.138748 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.154882 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx"]
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.165913 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75"]
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.192967 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-knb5c"]
Sep 29 14:00:08 crc kubenswrapper[4634]: W0929 14:00:08.216400 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45b61a8e_44b5_4cca_85b6_344738b51f52.slice/crio-15947fc0593b52e5399f22fd2f5661e25dd4b98887c522ba13eeb503e9a7476b WatchSource:0}: Error finding container 15947fc0593b52e5399f22fd2f5661e25dd4b98887c522ba13eeb503e9a7476b: Status 404 returned error can't find the container with id 15947fc0593b52e5399f22fd2f5661e25dd4b98887c522ba13eeb503e9a7476b
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.224010 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f"]
Sep 29 14:00:08 crc kubenswrapper[4634]: W0929 14:00:08.229256 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97de340e_634f_47e2_8a37_800f2261e43b.slice/crio-8e3301320369c3b6fe5003042cf8ea779f8b6200b3027bbf5932b96175b44bb5 WatchSource:0}: Error finding container 8e3301320369c3b6fe5003042cf8ea779f8b6200b3027bbf5932b96175b44bb5: Status 404 returned error can't find the container with id 8e3301320369c3b6fe5003042cf8ea779f8b6200b3027bbf5932b96175b44bb5
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.238020 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x"]
Sep 29 14:00:08 crc kubenswrapper[4634]: E0929 14:00:08.239509 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" podUID="b108e534-0a60-4d24-a6b3-9b967045469a"
Sep 29 14:00:08 crc kubenswrapper[4634]: E0929 14:00:08.240230 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9z7kr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-76669f99c-7w4vx_openstack-operators(45b61a8e-44b5-4cca-85b6-344738b51f52): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 14:00:08 crc kubenswrapper[4634]: E0929 14:00:08.253377 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hw8fh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-cvk6x_openstack-operators(c5c15e4b-b806-4d39-915f-c6e60e6d72ea): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 14:00:08 crc kubenswrapper[4634]: E0929 14:00:08.254974 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" podUID="c5c15e4b-b806-4d39-915f-c6e60e6d72ea"
Sep 29 14:00:08 crc kubenswrapper[4634]: E0929 14:00:08.261570 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:87a522d480797f54499bcd1c4a48837e1b17c33d4cc43e99ed7a53b8cedb17c7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-22tlm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-858cd69f49-7v24f_openstack-operators(e220a6dd-ab23-4eeb-9cb7-8496c72cc19f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 14:00:08 crc kubenswrapper[4634]: E0929 14:00:08.261752 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:ae6fda8cafd6c3ab5d5e9c599d15b02ace61b8eacbac4de3df50427dfab6a0c0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lxrmc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5bf96cfbc4-2dp75_openstack-operators(97de340e-634f-47e2-8a37-800f2261e43b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.386278 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg"]
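[editor's note] Every "ErrImagePull: pull QPS exceeded" in this burst is a client-side rejection, not a registry error: roughly twenty operator pods requested images within the same second, and pulls beyond the kubelet's token bucket (KubeletConfiguration registryPullQPS, default 5, with registryBurst, default 10; those defaults are cited from the kubelet docs, not printed in this log) fail immediately and fall back to retry. A sketch of that admission check built on the flowcontrol package the kubelet itself uses for pull throttling; the loop count and output are illustrative:

// pull-qps.go: token-bucket admission akin to the kubelet's image-pull limit.
package main

import (
	"fmt"

	"k8s.io/client-go/util/flowcontrol"
)

func main() {
	// Refill 5 tokens/second, burst of 10 (assumed kubelet defaults).
	limiter := flowcontrol.NewTokenBucketRateLimiter(5, 10)
	for i := 1; i <= 20; i++ { // ~20 operator pods starting at once
		if limiter.TryAccept() {
			fmt.Printf("pull %2d admitted\n", i)
		} else {
			fmt.Printf("pull %2d rejected: pull QPS exceeded\n", i) // surfaces as ErrImagePull
		}
	}
}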
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg"] Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.790761 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"] Sep 29 14:00:08 crc kubenswrapper[4634]: W0929 14:00:08.834412 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63fb32c1_31c4_4ab0_b10e_c467e2c74410.slice/crio-a58e096a6a0a7e0b78bc172868c33d2da26e41f2c3327873b366cc4cdc98e0ec WatchSource:0}: Error finding container a58e096a6a0a7e0b78bc172868c33d2da26e41f2c3327873b366cc4cdc98e0ec: Status 404 returned error can't find the container with id a58e096a6a0a7e0b78bc172868c33d2da26e41f2c3327873b366cc4cdc98e0ec Sep 29 14:00:08 crc kubenswrapper[4634]: E0929 14:00:08.836706 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" podUID="e220a6dd-ab23-4eeb-9cb7-8496c72cc19f" Sep 29 14:00:08 crc kubenswrapper[4634]: I0929 14:00:08.997516 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" event={"ID":"3fce3aee-b45a-4d80-a2e5-529632ed8a2d","Type":"ContainerStarted","Data":"494fff4d3d2850397ea6bdf9a956e25385a3c73ab24d301a4920ae94b526f152"} Sep 29 14:00:09 crc kubenswrapper[4634]: E0929 14:00:09.006586 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" podUID="97de340e-634f-47e2-8a37-800f2261e43b" Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.033451 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" event={"ID":"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f","Type":"ContainerStarted","Data":"0861f64ce59cc4211df636f4ca9e9a738cc2225b568edaa2bbb23b1d2b032285"} Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.033500 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" event={"ID":"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f","Type":"ContainerStarted","Data":"667ed381d5cea2f36c1feea94be8e832e98904978419f767963759596c11aca8"} Sep 29 14:00:09 crc kubenswrapper[4634]: E0929 14:00:09.039375 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:87a522d480797f54499bcd1c4a48837e1b17c33d4cc43e99ed7a53b8cedb17c7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" podUID="e220a6dd-ab23-4eeb-9cb7-8496c72cc19f" Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.042851 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" event={"ID":"6cb280b6-d86c-42cb-8887-819b38c304b8","Type":"ContainerStarted","Data":"ae70d1957129b9a684a335f5ce6842d4ff0b1d3aa1b8790d46aeeaca84ea2941"} Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.044145 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" event={"ID":"ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df","Type":"ContainerStarted","Data":"1307003cabcd974a7adfa51f4034ca96063217e85ae226a772d774b871f9b0f2"} Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.047226 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" event={"ID":"b108e534-0a60-4d24-a6b3-9b967045469a","Type":"ContainerStarted","Data":"0e17fb8e3c4d6bddb199e60fc61f731d3758a4872a4cd6724b79883600747a61"} Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.047309 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" event={"ID":"b108e534-0a60-4d24-a6b3-9b967045469a","Type":"ContainerStarted","Data":"4eb3f5a92579f08524b01b064189e1f5f0084c60a54d71585e441f2919e9b941"} Sep 29 14:00:09 crc kubenswrapper[4634]: E0929 14:00:09.065523 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" podUID="b108e534-0a60-4d24-a6b3-9b967045469a" Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.072070 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" event={"ID":"65cef236-09ce-4623-9cd8-9d4c0e1f8346","Type":"ContainerStarted","Data":"8d6709a86ab8ebf13f15554a4e89b1cd5428964d3e7e6c6786d48c089e6aaf1f"} Sep 29 14:00:09 crc kubenswrapper[4634]: E0929 14:00:09.079029 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" podUID="45b61a8e-44b5-4cca-85b6-344738b51f52" Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.085010 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" event={"ID":"63fb32c1-31c4-4ab0-b10e-c467e2c74410","Type":"ContainerStarted","Data":"a58e096a6a0a7e0b78bc172868c33d2da26e41f2c3327873b366cc4cdc98e0ec"} Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.104382 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" event={"ID":"c5c15e4b-b806-4d39-915f-c6e60e6d72ea","Type":"ContainerStarted","Data":"8abc9fd1db1bc6d4ac07ebc82ec3fa5f0ddfe751703d866f18aeb8aef436c9ce"} Sep 29 14:00:09 crc kubenswrapper[4634]: E0929 14:00:09.110178 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" podUID="c5c15e4b-b806-4d39-915f-c6e60e6d72ea" Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.111482 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" 
event={"ID":"e6c834dc-3418-4d52-ade3-02c1043d6360","Type":"ContainerStarted","Data":"c571bd794e4c865d290ffca4c2a45c48d8698e78ed40f0d700020ee101624b60"} Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.113698 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" event={"ID":"97de340e-634f-47e2-8a37-800f2261e43b","Type":"ContainerStarted","Data":"8e3301320369c3b6fe5003042cf8ea779f8b6200b3027bbf5932b96175b44bb5"} Sep 29 14:00:09 crc kubenswrapper[4634]: E0929 14:00:09.128313 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:ae6fda8cafd6c3ab5d5e9c599d15b02ace61b8eacbac4de3df50427dfab6a0c0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" podUID="97de340e-634f-47e2-8a37-800f2261e43b" Sep 29 14:00:09 crc kubenswrapper[4634]: I0929 14:00:09.151400 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" event={"ID":"45b61a8e-44b5-4cca-85b6-344738b51f52","Type":"ContainerStarted","Data":"15947fc0593b52e5399f22fd2f5661e25dd4b98887c522ba13eeb503e9a7476b"} Sep 29 14:00:09 crc kubenswrapper[4634]: E0929 14:00:09.152791 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" podUID="45b61a8e-44b5-4cca-85b6-344738b51f52" Sep 29 14:00:10 crc kubenswrapper[4634]: I0929 14:00:10.208977 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" event={"ID":"45b61a8e-44b5-4cca-85b6-344738b51f52","Type":"ContainerStarted","Data":"d017c468e1de03fe3c00e4167fa62f601dc1586cd819a2bfb22e28d52e25d515"} Sep 29 14:00:10 crc kubenswrapper[4634]: E0929 14:00:10.211741 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" podUID="45b61a8e-44b5-4cca-85b6-344738b51f52" Sep 29 14:00:10 crc kubenswrapper[4634]: I0929 14:00:10.222463 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" event={"ID":"63fb32c1-31c4-4ab0-b10e-c467e2c74410","Type":"ContainerStarted","Data":"624e90ae80bb44912a880c91ba23fa29dd0653f9e251478518a82bcdd79da9c5"} Sep 29 14:00:10 crc kubenswrapper[4634]: I0929 14:00:10.222505 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" event={"ID":"63fb32c1-31c4-4ab0-b10e-c467e2c74410","Type":"ContainerStarted","Data":"3f1f40b9733ca6a2c36e9dbaa873ef58432f8c1b8450fd26ee3b4bd459188098"} Sep 29 14:00:10 crc kubenswrapper[4634]: I0929 14:00:10.223159 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" Sep 29 14:00:10 
Sep 29 14:00:10 crc kubenswrapper[4634]: I0929 14:00:10.241903 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" event={"ID":"97de340e-634f-47e2-8a37-800f2261e43b","Type":"ContainerStarted","Data":"4d4e3cf86cc51f03b223e2344829c691f30eb11292aec3f8466120d53e1d5e9f"}
Sep 29 14:00:10 crc kubenswrapper[4634]: E0929 14:00:10.246053 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:ae6fda8cafd6c3ab5d5e9c599d15b02ace61b8eacbac4de3df50427dfab6a0c0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" podUID="97de340e-634f-47e2-8a37-800f2261e43b"
Sep 29 14:00:10 crc kubenswrapper[4634]: E0929 14:00:10.246465 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:87a522d480797f54499bcd1c4a48837e1b17c33d4cc43e99ed7a53b8cedb17c7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" podUID="e220a6dd-ab23-4eeb-9cb7-8496c72cc19f"
Sep 29 14:00:10 crc kubenswrapper[4634]: E0929 14:00:10.246525 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" podUID="b108e534-0a60-4d24-a6b3-9b967045469a"
Sep 29 14:00:10 crc kubenswrapper[4634]: E0929 14:00:10.246562 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" podUID="c5c15e4b-b806-4d39-915f-c6e60e6d72ea"
Sep 29 14:00:10 crc kubenswrapper[4634]: I0929 14:00:10.880932 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px" podStartSLOduration=4.880915285 podStartE2EDuration="4.880915285s" podCreationTimestamp="2025-09-29 14:00:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:00:10.879439858 +0000 UTC m=+941.448167607" watchObservedRunningTime="2025-09-29 14:00:10.880915285 +0000 UTC m=+941.449643034"
Sep 29 14:00:11 crc kubenswrapper[4634]: E0929 14:00:11.253738 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:ae6fda8cafd6c3ab5d5e9c599d15b02ace61b8eacbac4de3df50427dfab6a0c0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" podUID="97de340e-634f-47e2-8a37-800f2261e43b"
Sep 29 14:00:11 crc kubenswrapper[4634]: E0929 14:00:11.254385 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" podUID="45b61a8e-44b5-4cca-85b6-344738b51f52"
Sep 29 14:00:14 crc kubenswrapper[4634]: I0929 14:00:14.396250 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:00:14 crc kubenswrapper[4634]: I0929 14:00:14.396589 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:00:14 crc kubenswrapper[4634]: I0929 14:00:14.396637 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4"
Sep 29 14:00:14 crc kubenswrapper[4634]: I0929 14:00:14.397060 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"66e5f7cddcf0d5a52ca4459df7c3e5983f76e1e654e4c50e5ebc51cf61af5126"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 14:00:14 crc kubenswrapper[4634]: I0929 14:00:14.397132 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://66e5f7cddcf0d5a52ca4459df7c3e5983f76e1e654e4c50e5ebc51cf61af5126" gracePeriod=600
Sep 29 14:00:15 crc kubenswrapper[4634]: I0929 14:00:15.297292 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="66e5f7cddcf0d5a52ca4459df7c3e5983f76e1e654e4c50e5ebc51cf61af5126" exitCode=0
Sep 29 14:00:15 crc kubenswrapper[4634]: I0929 14:00:15.297380 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"66e5f7cddcf0d5a52ca4459df7c3e5983f76e1e654e4c50e5ebc51cf61af5126"}
Sep 29 14:00:15 crc kubenswrapper[4634]: I0929 14:00:15.297649 4634 scope.go:117] "RemoveContainer" containerID="dbcc2440180d99fb45da22933b773ac34313e312284872b76ccc1d05c2cec895"
Sep 29 14:00:18 crc kubenswrapper[4634]: I0929 14:00:18.146215 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-667746d855-fd8px"
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6c4mq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-c7c776c96-9z55w_openstack-operators(11b77d0f-14f2-47d2-839a-6e06505787a2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:00:25 crc kubenswrapper[4634]: E0929 14:00:25.975970 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:8961fc302c92bf476c1d00be0c02e964c449032f8d17672389cff40c71eeb1d3" Sep 29 14:00:25 crc kubenswrapper[4634]: E0929 14:00:25.977745 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:8961fc302c92bf476c1d00be0c02e964c449032f8d17672389cff40c71eeb1d3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m 
DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tpzcn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-67b5d44b7f-fzzjz_openstack-operators(12b1701c-523e-428c-817b-f0ae4914b9fb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:00:26 crc kubenswrapper[4634]: E0929 14:00:26.498100 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:206c77b3e1a1deb9012ed7635b4c485c6ca722137534909a4e1d700fd9a6293c" Sep 29 14:00:26 crc kubenswrapper[4634]: E0929 14:00:26.498418 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:206c77b3e1a1deb9012ed7635b4c485c6ca722137534909a4e1d700fd9a6293c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zq9x7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-6495d75b5-2nbgf_openstack-operators(7fb4797f-f58b-425a-a987-4559c9d5d481): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:00:29 crc kubenswrapper[4634]: E0929 14:00:29.643843 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:26db59a990341558d29c00da7503b2c5b9a415db8cc04a0006f198f30ec016d4" Sep 29 14:00:29 crc kubenswrapper[4634]: E0929 14:00:29.647315 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:26db59a990341558d29c00da7503b2c5b9a415db8cc04a0006f198f30ec016d4,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zkh4x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-5f95c46c78-2vb6t_openstack-operators(0808341c-4037-4360-bc34-dce11a7e8088): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:00:29 crc kubenswrapper[4634]: E0929 14:00:29.919847 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:d2eba62b82728578c57f60de5baa3562bc0a355f65123a9e5fedff385988eb64" Sep 29 14:00:29 crc kubenswrapper[4634]: E0929 14:00:29.920099 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:d2eba62b82728578c57f60de5baa3562bc0a355f65123a9e5fedff385988eb64,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j8dbr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-56cf9c6b99-bd4mn_openstack-operators(ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:00:30 crc kubenswrapper[4634]: E0929 14:00:30.383208 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:cd16b89f5e23d703fa0183db51a060d8d200bcfe2207b9bf565c73db6b5b9f03" Sep 29 14:00:30 crc kubenswrapper[4634]: E0929 14:00:30.383773 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:cd16b89f5e23d703fa0183db51a060d8d200bcfe2207b9bf565c73db6b5b9f03,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8z9f7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-8ff95898-b7s9w_openstack-operators(8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:00:32 crc kubenswrapper[4634]: E0929 14:00:32.758049 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8" Sep 29 14:00:32 crc kubenswrapper[4634]: E0929 14:00:32.759467 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fwr6n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-7c4mq_openstack-operators(65cef236-09ce-4623-9cd8-9d4c0e1f8346): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 29 14:00:37 crc kubenswrapper[4634]: E0929 14:00:37.980431 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" podUID="12b1701c-523e-428c-817b-f0ae4914b9fb"
Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.317757 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" podUID="7fb4797f-f58b-425a-a987-4559c9d5d481"
Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.318759 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" podUID="11b77d0f-14f2-47d2-839a-6e06505787a2"
Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.321284 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" podUID="ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df"
Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.422001 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" podUID="65cef236-09ce-4623-9cd8-9d4c0e1f8346"
Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.423197 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" podUID="8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15"
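The &Container{...} dumps in the "Unhandled Error" entries above are kubelet printing the Go form of each operator's container spec. As a hedged illustration only (field values read off the dump, not taken from the operator sources), the liveness and readiness probes printed there correspond to the following k8s.io/api values; the "{0 8081 }" fragment is how an intstr.IntOrString port renders when the struct is printed:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// Liveness probe as printed in the dumps: GET /healthz on port 8081,
	// first checked after 15s, then every 20s, failing after 3 misses.
	liveness := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path:   "/healthz",
				Port:   intstr.FromInt(8081), // rendered as {0 8081 } in the dump
				Scheme: corev1.URISchemeHTTP,
			},
		},
		InitialDelaySeconds: 15,
		TimeoutSeconds:      1,
		PeriodSeconds:       20,
		SuccessThreshold:    1,
		FailureThreshold:    3,
	}
	// Readiness probe: GET /readyz, shorter initial delay and period.
	readiness := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path:   "/readyz",
				Port:   intstr.FromInt(8081),
				Scheme: corev1.URISchemeHTTP,
			},
		},
		InitialDelaySeconds: 5,
		TimeoutSeconds:      1,
		PeriodSeconds:       10,
		SuccessThreshold:    1,
		FailureThreshold:    3,
	}
	fmt.Printf("%+v\n%+v\n", liveness, readiness)
}

These timings line up with the probe entries later in the log: a freshly started manager container first reports probe="readiness" status="" and flips to status="ready" once /readyz starts answering.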
event={"ID":"fc9290c5-62eb-4b93-8b0f-032c2474510f","Type":"ContainerStarted","Data":"f53c2dc8fc934b29a261a2bd42d53575ea64eaceecaca02b2c799c882150a412"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.542866 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" event={"ID":"be9fbcb2-15d0-4fc2-b745-41178d406fca","Type":"ContainerStarted","Data":"487e149decd30d46eb930078d4c60a931254c45199389f2d7aac8eb9f3e27445"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.571498 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" event={"ID":"ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df","Type":"ContainerStarted","Data":"0029d87a05aa1155410117c721802453b6df71abc85b13f92c87f7398e5f2a43"} Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.603850 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:d2eba62b82728578c57f60de5baa3562bc0a355f65123a9e5fedff385988eb64\\\"\"" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" podUID="ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df" Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.616439 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" event={"ID":"d82e90ad-ac20-415a-9b7e-168e6472f2a8","Type":"ContainerStarted","Data":"140fbee6a85f0c6ba0bf31d977a1a315b8e8926de5d2939e7315418f7c13b272"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.626780 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" event={"ID":"0b0b3b6f-0579-4a42-bad2-ecbda8906426","Type":"ContainerStarted","Data":"5b46d931b9d0988ab1861b8c23e459907a47caeeafd8ca62e4d11738e027b090"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.643282 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" event={"ID":"e777128b-ae24-469f-81bb-adf78608f20e","Type":"ContainerStarted","Data":"04e1c59f395c019648581b05c75c0bd0082b89f11f4fd67dd0184d5a1c6c6284"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.679276 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" event={"ID":"12b1701c-523e-428c-817b-f0ae4914b9fb","Type":"ContainerStarted","Data":"7b271c28afa28683d91bc26bfdd514678e42e72c5f5750289227cf665a2fd250"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.699281 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" event={"ID":"e6c834dc-3418-4d52-ade3-02c1043d6360","Type":"ContainerStarted","Data":"81bfa8500e394223954f93aa8a3f3533a6ca5cf8b856f04f5218818ad35d0411"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.717651 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" event={"ID":"97de340e-634f-47e2-8a37-800f2261e43b","Type":"ContainerStarted","Data":"1968874142f270c514d1a7629795a0dc4ca8e0a436395aa3c5cd5cb7a0dfebd4"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.718296 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.719818 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" event={"ID":"c00a2f33-36be-4039-a5ae-73df39f84d1d","Type":"ContainerStarted","Data":"919f19cd61d3a49faea817888821973823bc169cd27b7ce90aa10bd6be738e12"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.719843 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" event={"ID":"c00a2f33-36be-4039-a5ae-73df39f84d1d","Type":"ContainerStarted","Data":"7a9dc9267785f93462c0795d0f085c6315c34d76a3d9e9674fc5b1c334f68a94"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.720228 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.739574 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" event={"ID":"7fb4797f-f58b-425a-a987-4559c9d5d481","Type":"ContainerStarted","Data":"211c0685829a215cff1a36efc63f55a6f3e933f5e4d6326fc15083308bed6c18"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.749689 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" event={"ID":"65cef236-09ce-4623-9cd8-9d4c0e1f8346","Type":"ContainerStarted","Data":"0e20dc85745c49370407e1fd33b4178ee7c61a2a1f96d789c4bd3c89576a8294"} Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.751181 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" podUID="65cef236-09ce-4623-9cd8-9d4c0e1f8346" Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.764882 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" event={"ID":"8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15","Type":"ContainerStarted","Data":"4b2f1157a498fa6025ea5e518d6c7771436aed062e06b203c8eaa041c29374e6"} Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.768953 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:cd16b89f5e23d703fa0183db51a060d8d200bcfe2207b9bf565c73db6b5b9f03\\\"\"" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" podUID="8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15" Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.776516 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" event={"ID":"3fce3aee-b45a-4d80-a2e5-529632ed8a2d","Type":"ContainerStarted","Data":"085082bd0c448e8422a7456d0b0728c50d9203dc1eeeecc534e84f54ea9764f5"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.804903 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" 
event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"57b4d47644425468a03fbc283811a82747ba711e9f6742c5de405a2bc380e087"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.810692 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" event={"ID":"11b77d0f-14f2-47d2-839a-6e06505787a2","Type":"ContainerStarted","Data":"5ec5c7f2b5eb006d64a1c1ccad7728ad6fd76b7a5844345fa9c86a6b0ac87fc9"} Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.817577 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" podStartSLOduration=6.747136347 podStartE2EDuration="33.817551359s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.76450898 +0000 UTC m=+938.333236729" lastFinishedPulling="2025-09-29 14:00:34.834923982 +0000 UTC m=+965.403651741" observedRunningTime="2025-09-29 14:00:38.769058486 +0000 UTC m=+969.337786235" watchObservedRunningTime="2025-09-29 14:00:38.817551359 +0000 UTC m=+969.386279108" Sep 29 14:00:38 crc kubenswrapper[4634]: I0929 14:00:38.860530 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" podStartSLOduration=4.571210021 podStartE2EDuration="33.860505436s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:08.261631961 +0000 UTC m=+938.830359710" lastFinishedPulling="2025-09-29 14:00:37.550927336 +0000 UTC m=+968.119655125" observedRunningTime="2025-09-29 14:00:38.83506333 +0000 UTC m=+969.403791079" watchObservedRunningTime="2025-09-29 14:00:38.860505436 +0000 UTC m=+969.429233185" Sep 29 14:00:38 crc kubenswrapper[4634]: E0929 14:00:38.967993 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" podUID="0808341c-4037-4360-bc34-dce11a7e8088" Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.849660 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" event={"ID":"b108e534-0a60-4d24-a6b3-9b967045469a","Type":"ContainerStarted","Data":"7601ae5807dc7d69a462acbca0f7f65abbe3221be2f101875c8288135d2db8c4"} Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.850355 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.852792 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" event={"ID":"e220a6dd-ab23-4eeb-9cb7-8496c72cc19f","Type":"ContainerStarted","Data":"271f9cea02abc2e54323acfc2943bd87cb4245e232beff034465f859f0af0494"} Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.853028 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.857851 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" 
event={"ID":"c5c15e4b-b806-4d39-915f-c6e60e6d72ea","Type":"ContainerStarted","Data":"ffcb0df564cf7b0bb386b372dfe4fe0d0f941df9b8e3d0c602fe78def488d941"} Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.859293 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" event={"ID":"6cb280b6-d86c-42cb-8887-819b38c304b8","Type":"ContainerStarted","Data":"2dd5f76f94d5985b40ef8262f6a48e5fdb30d0d5e408020045361b4e9c789c2e"} Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.861291 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" event={"ID":"45b61a8e-44b5-4cca-85b6-344738b51f52","Type":"ContainerStarted","Data":"da4c44d1f345bb2f140d1309720aaff19b297244fbdbcb0ad58696b27594a10f"} Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.861545 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.863785 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" event={"ID":"0808341c-4037-4360-bc34-dce11a7e8088","Type":"ContainerStarted","Data":"515fbcc351b081fddf66692c1aa044a973920d271a2ff2a986991272dad6bce8"} Sep 29 14:00:39 crc kubenswrapper[4634]: I0929 14:00:39.874245 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" event={"ID":"ba50e2d0-3018-4591-81fd-9e31c5d39951","Type":"ContainerStarted","Data":"394c14c1eb87fd9daf9e28033d9323317079e00501b92c7111f24d79bacca762"} Sep 29 14:00:39 crc kubenswrapper[4634]: E0929 14:00:39.876297 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" podUID="65cef236-09ce-4623-9cd8-9d4c0e1f8346" Sep 29 14:00:39 crc kubenswrapper[4634]: E0929 14:00:39.881294 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:cd16b89f5e23d703fa0183db51a060d8d200bcfe2207b9bf565c73db6b5b9f03\\\"\"" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" podUID="8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15" Sep 29 14:00:39 crc kubenswrapper[4634]: E0929 14:00:39.883783 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:d2eba62b82728578c57f60de5baa3562bc0a355f65123a9e5fedff385988eb64\\\"\"" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" podUID="ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df" Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.054464 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" podStartSLOduration=5.424804283 podStartE2EDuration="35.054442401s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.997238456 +0000 UTC 
m=+938.565966205" lastFinishedPulling="2025-09-29 14:00:37.626876534 +0000 UTC m=+968.195604323" observedRunningTime="2025-09-29 14:00:39.98856067 +0000 UTC m=+970.557288409" watchObservedRunningTime="2025-09-29 14:00:40.054442401 +0000 UTC m=+970.623170160" Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.178795 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" podStartSLOduration=5.848195269 podStartE2EDuration="35.1787736s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:08.240120201 +0000 UTC m=+938.808847950" lastFinishedPulling="2025-09-29 14:00:37.570698532 +0000 UTC m=+968.139426281" observedRunningTime="2025-09-29 14:00:40.129249462 +0000 UTC m=+970.697977211" watchObservedRunningTime="2025-09-29 14:00:40.1787736 +0000 UTC m=+970.747501349" Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.314093 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" podStartSLOduration=5.94945152 podStartE2EDuration="35.314052279s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:08.261437485 +0000 UTC m=+938.830165234" lastFinishedPulling="2025-09-29 14:00:37.626038254 +0000 UTC m=+968.194765993" observedRunningTime="2025-09-29 14:00:40.306625006 +0000 UTC m=+970.875352755" watchObservedRunningTime="2025-09-29 14:00:40.314052279 +0000 UTC m=+970.882780028" Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.882686 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" event={"ID":"e777128b-ae24-469f-81bb-adf78608f20e","Type":"ContainerStarted","Data":"6a49139fa19b55bb788fdd43f589d72ced4b102f63fa942aa11d87ad32083b37"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.884368 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" event={"ID":"e6c834dc-3418-4d52-ade3-02c1043d6360","Type":"ContainerStarted","Data":"1e428d2d8cd7ecd4897e8afbb7ffba69c65e696e12d3c4866465780687ec0870"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.885946 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" event={"ID":"fc9290c5-62eb-4b93-8b0f-032c2474510f","Type":"ContainerStarted","Data":"26818e34f56b1085ae1fc4891427f106323a7cd655c904ce2fee28850007b4aa"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.886076 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.887581 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" event={"ID":"6cb280b6-d86c-42cb-8887-819b38c304b8","Type":"ContainerStarted","Data":"0e77e018b27882432591ce0b0dc54a8f4e59cab2a31741de688ba0b15d857595"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.889187 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" event={"ID":"ba50e2d0-3018-4591-81fd-9e31c5d39951","Type":"ContainerStarted","Data":"203abe11b759de11146c3041423ca19919eaac370c9d171126d386ae98a2bbf6"} Sep 29 14:00:40 crc 
kubenswrapper[4634]: I0929 14:00:40.891029 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" event={"ID":"3fce3aee-b45a-4d80-a2e5-529632ed8a2d","Type":"ContainerStarted","Data":"4adc927bc6c95cb123819d5cacfeabf0d6117a357e6f7a15aecee25e7300505b"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.892760 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" event={"ID":"be9fbcb2-15d0-4fc2-b745-41178d406fca","Type":"ContainerStarted","Data":"284776d53e5797f0cd5b0a0ec541c04e39abff54bca24e3ae845fe49e16c0f5b"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.894446 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" event={"ID":"d82e90ad-ac20-415a-9b7e-168e6472f2a8","Type":"ContainerStarted","Data":"ab44cc5401ebb96f6aa95d12c74337c5b83cce409435585da06a2ee5ef7539f0"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.896503 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" event={"ID":"0b0b3b6f-0579-4a42-bad2-ecbda8906426","Type":"ContainerStarted","Data":"b5420df0fdd81fcdf03b53ba59dfb14f02d50414535f4a5a5e9b1a6f0531b9e4"} Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.913433 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" podStartSLOduration=7.952239637 podStartE2EDuration="35.913410405s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.386401427 +0000 UTC m=+937.955129176" lastFinishedPulling="2025-09-29 14:00:35.347572195 +0000 UTC m=+965.916299944" observedRunningTime="2025-09-29 14:00:40.91033718 +0000 UTC m=+971.479064929" watchObservedRunningTime="2025-09-29 14:00:40.913410405 +0000 UTC m=+971.482138154" Sep 29 14:00:40 crc kubenswrapper[4634]: I0929 14:00:40.915171 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-cvk6x" podStartSLOduration=5.401103545 podStartE2EDuration="34.915166269s" podCreationTimestamp="2025-09-29 14:00:06 +0000 UTC" firstStartedPulling="2025-09-29 14:00:08.253258125 +0000 UTC m=+938.821985874" lastFinishedPulling="2025-09-29 14:00:37.767320849 +0000 UTC m=+968.336048598" observedRunningTime="2025-09-29 14:00:40.348459765 +0000 UTC m=+970.917187514" watchObservedRunningTime="2025-09-29 14:00:40.915166269 +0000 UTC m=+971.483894018" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.910152 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" event={"ID":"12b1701c-523e-428c-817b-f0ae4914b9fb","Type":"ContainerStarted","Data":"a4aa75e1a9481c5954f3d9a30e0b74d5a9181a1691b1cb63680eaa2e420cc57c"} Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.910459 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.912371 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" 
event={"ID":"0808341c-4037-4360-bc34-dce11a7e8088","Type":"ContainerStarted","Data":"455a56053667239c6eaec98580b797e26141e2a9582467042328ebf1384f8aa0"} Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.912461 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.919575 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" event={"ID":"7fb4797f-f58b-425a-a987-4559c9d5d481","Type":"ContainerStarted","Data":"cc914f9137c97a43943aafa340ad5495c20c7cfc9200c034fde9dde471896340"} Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.919733 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.925731 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" event={"ID":"11b77d0f-14f2-47d2-839a-6e06505787a2","Type":"ContainerStarted","Data":"81abaf5b9def8532689b6a4c5c26b9edd10d9986b6a05915e0cd1201a8ab93e6"} Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.925797 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.925815 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.926277 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.926646 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.926999 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.927368 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.927755 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.928595 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" Sep 29 14:00:42 crc kubenswrapper[4634]: I0929 14:00:42.928636 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.044902 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" podStartSLOduration=2.837411463 podStartE2EDuration="38.044886808s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.143013888 
+0000 UTC m=+937.711741637" lastFinishedPulling="2025-09-29 14:00:42.350489233 +0000 UTC m=+972.919216982" observedRunningTime="2025-09-29 14:00:42.969959274 +0000 UTC m=+973.538687023" watchObservedRunningTime="2025-09-29 14:00:43.044886808 +0000 UTC m=+973.613614557" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.046578 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" podStartSLOduration=11.217496725 podStartE2EDuration="38.04657245s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:08.51726404 +0000 UTC m=+939.085991789" lastFinishedPulling="2025-09-29 14:00:35.346339765 +0000 UTC m=+965.915067514" observedRunningTime="2025-09-29 14:00:43.037437634 +0000 UTC m=+973.606165383" watchObservedRunningTime="2025-09-29 14:00:43.04657245 +0000 UTC m=+973.615300189" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.164114 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" podStartSLOduration=10.36336363 podStartE2EDuration="38.164095401s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.545493541 +0000 UTC m=+938.114221290" lastFinishedPulling="2025-09-29 14:00:35.346225312 +0000 UTC m=+965.914953061" observedRunningTime="2025-09-29 14:00:43.091981347 +0000 UTC m=+973.660709096" watchObservedRunningTime="2025-09-29 14:00:43.164095401 +0000 UTC m=+973.732823150" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.218069 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" podStartSLOduration=2.738901759 podStartE2EDuration="38.218049348s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.002337587 +0000 UTC m=+937.571065326" lastFinishedPulling="2025-09-29 14:00:42.481485166 +0000 UTC m=+973.050212915" observedRunningTime="2025-09-29 14:00:43.212937652 +0000 UTC m=+973.781665401" watchObservedRunningTime="2025-09-29 14:00:43.218049348 +0000 UTC m=+973.786777097" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.219244 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" podStartSLOduration=10.64425161 podStartE2EDuration="38.219238177s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.7710277 +0000 UTC m=+938.339755459" lastFinishedPulling="2025-09-29 14:00:35.346014277 +0000 UTC m=+965.914742026" observedRunningTime="2025-09-29 14:00:43.170735004 +0000 UTC m=+973.739462743" watchObservedRunningTime="2025-09-29 14:00:43.219238177 +0000 UTC m=+973.787965926" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.242958 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" podStartSLOduration=3.487735893 podStartE2EDuration="38.242943451s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.764581811 +0000 UTC m=+938.333309560" lastFinishedPulling="2025-09-29 14:00:42.519789359 +0000 UTC m=+973.088517118" observedRunningTime="2025-09-29 14:00:43.235124088 +0000 UTC m=+973.803851837" watchObservedRunningTime="2025-09-29 14:00:43.242943451 +0000 UTC 
m=+973.811671200" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.331176 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" podStartSLOduration=10.451319404 podStartE2EDuration="38.331159041s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.467296447 +0000 UTC m=+938.036024196" lastFinishedPulling="2025-09-29 14:00:35.347136084 +0000 UTC m=+965.915863833" observedRunningTime="2025-09-29 14:00:43.275839 +0000 UTC m=+973.844566749" watchObservedRunningTime="2025-09-29 14:00:43.331159041 +0000 UTC m=+973.899886790" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.406540 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" podStartSLOduration=11.714665207 podStartE2EDuration="38.406524755s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.406522322 +0000 UTC m=+937.975250071" lastFinishedPulling="2025-09-29 14:00:34.09838186 +0000 UTC m=+964.667109619" observedRunningTime="2025-09-29 14:00:43.405030729 +0000 UTC m=+973.973758478" watchObservedRunningTime="2025-09-29 14:00:43.406524755 +0000 UTC m=+973.975252504" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.409438 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" podStartSLOduration=11.302913597 podStartE2EDuration="38.409428877s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:08.239800404 +0000 UTC m=+938.808528153" lastFinishedPulling="2025-09-29 14:00:35.346315684 +0000 UTC m=+965.915043433" observedRunningTime="2025-09-29 14:00:43.332950275 +0000 UTC m=+973.901678024" watchObservedRunningTime="2025-09-29 14:00:43.409428877 +0000 UTC m=+973.978156626" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.445767 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" podStartSLOduration=10.632161904 podStartE2EDuration="38.44575237s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.021319395 +0000 UTC m=+937.590047144" lastFinishedPulling="2025-09-29 14:00:34.834909851 +0000 UTC m=+965.403637610" observedRunningTime="2025-09-29 14:00:43.439719102 +0000 UTC m=+974.008446851" watchObservedRunningTime="2025-09-29 14:00:43.44575237 +0000 UTC m=+974.014480119" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.474505 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" podStartSLOduration=11.166044308 podStartE2EDuration="38.474491027s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:08.039802073 +0000 UTC m=+938.608529822" lastFinishedPulling="2025-09-29 14:00:35.348248792 +0000 UTC m=+965.916976541" observedRunningTime="2025-09-29 14:00:43.470058599 +0000 UTC m=+974.038786348" watchObservedRunningTime="2025-09-29 14:00:43.474491027 +0000 UTC m=+974.043218776" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.519262 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" podStartSLOduration=3.769326901 
podStartE2EDuration="38.519247138s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.774564967 +0000 UTC m=+938.343292716" lastFinishedPulling="2025-09-29 14:00:42.524485204 +0000 UTC m=+973.093212953" observedRunningTime="2025-09-29 14:00:43.503485291 +0000 UTC m=+974.072213040" watchObservedRunningTime="2025-09-29 14:00:43.519247138 +0000 UTC m=+974.087974887" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.938890 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7bf498966c-9l547" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.939786 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-695847bc78-g6ncd" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.939889 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-knb5c" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.939974 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-687b9cf756-hd2mv" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.940551 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-774b97b48-66mbm" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.940744 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-54d766c9f9-k2dqf" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.941116 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-748c574d75-h2wqv" Sep 29 14:00:43 crc kubenswrapper[4634]: I0929 14:00:43.944166 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" Sep 29 14:00:45 crc kubenswrapper[4634]: I0929 14:00:45.515531 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-7d74f4d695-w2nj4" Sep 29 14:00:46 crc kubenswrapper[4634]: I0929 14:00:46.036437 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-9fc8d5567-c68h7" Sep 29 14:00:46 crc kubenswrapper[4634]: I0929 14:00:46.504327 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-6gkh8" Sep 29 14:00:46 crc kubenswrapper[4634]: I0929 14:00:46.535384 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5bf96cfbc4-2dp75" Sep 29 14:00:46 crc kubenswrapper[4634]: I0929 14:00:46.595000 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-7w4vx" Sep 29 14:00:47 crc kubenswrapper[4634]: I0929 14:00:47.169075 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-858cd69f49-7v24f" Sep 29 14:00:51 crc kubenswrapper[4634]: I0929 14:00:51.113282 4634 provider.go:102] Refreshing cache for provider: 
Sep 29 14:00:51 crc kubenswrapper[4634]: I0929 14:00:51.113282 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 14:00:53 crc kubenswrapper[4634]: I0929 14:00:53.031058 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" event={"ID":"ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df","Type":"ContainerStarted","Data":"bdf40e992bc1e8c0fa969214bbf66f0d50da660018667ac7f0aa30af97a3be01"}
Sep 29 14:00:53 crc kubenswrapper[4634]: I0929 14:00:53.031993 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn"
Sep 29 14:00:53 crc kubenswrapper[4634]: I0929 14:00:53.032908 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" event={"ID":"8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15","Type":"ContainerStarted","Data":"2c627dbd997fb18fadbfc3c21d41e89051ff2e22e9b5fedf5f8978f389f38832"}
Sep 29 14:00:53 crc kubenswrapper[4634]: I0929 14:00:53.033221 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w"
Sep 29 14:00:53 crc kubenswrapper[4634]: I0929 14:00:53.058993 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" podStartSLOduration=3.997217459 podStartE2EDuration="48.058965662s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.972310582 +0000 UTC m=+938.541038331" lastFinishedPulling="2025-09-29 14:00:52.034058765 +0000 UTC m=+982.602786534" observedRunningTime="2025-09-29 14:00:53.052283517 +0000 UTC m=+983.621011266" watchObservedRunningTime="2025-09-29 14:00:53.058965662 +0000 UTC m=+983.627693421"
Sep 29 14:00:54 crc kubenswrapper[4634]: I0929 14:00:54.041629 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" event={"ID":"65cef236-09ce-4623-9cd8-9d4c0e1f8346","Type":"ContainerStarted","Data":"346e2de65db775941f3dcc325b197123fd21f945ab7221c6b78f110af55edad1"}
Sep 29 14:00:54 crc kubenswrapper[4634]: I0929 14:00:54.042611 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq"
Sep 29 14:00:54 crc kubenswrapper[4634]: I0929 14:00:54.066192 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" podStartSLOduration=3.4068472229999998 podStartE2EDuration="49.066174442s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.968025547 +0000 UTC m=+938.536753296" lastFinishedPulling="2025-09-29 14:00:53.627352766 +0000 UTC m=+984.196080515" observedRunningTime="2025-09-29 14:00:54.065354783 +0000 UTC m=+984.634082542" watchObservedRunningTime="2025-09-29 14:00:54.066174442 +0000 UTC m=+984.634902191"
Sep 29 14:00:54 crc kubenswrapper[4634]: I0929 14:00:54.070411 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" podStartSLOduration=4.019528619 podStartE2EDuration="49.070394977s" podCreationTimestamp="2025-09-29 14:00:05 +0000 UTC" firstStartedPulling="2025-09-29 14:00:07.368388444 +0000 UTC m=+937.937116193" lastFinishedPulling="2025-09-29 14:00:52.419254802 +0000 UTC m=+982.987982551"
observedRunningTime="2025-09-29 14:00:53.079946857 +0000 UTC m=+983.648674606" watchObservedRunningTime="2025-09-29 14:00:54.070394977 +0000 UTC m=+984.639122726" Sep 29 14:00:55 crc kubenswrapper[4634]: I0929 14:00:55.434999 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6495d75b5-2nbgf" Sep 29 14:00:55 crc kubenswrapper[4634]: I0929 14:00:55.547295 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-67b5d44b7f-fzzjz" Sep 29 14:00:56 crc kubenswrapper[4634]: I0929 14:00:56.063493 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-9z55w" Sep 29 14:00:56 crc kubenswrapper[4634]: I0929 14:00:56.201900 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-5f95c46c78-2vb6t" Sep 29 14:01:05 crc kubenswrapper[4634]: I0929 14:01:05.879329 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-8ff95898-b7s9w" Sep 29 14:01:05 crc kubenswrapper[4634]: I0929 14:01:05.960553 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-56cf9c6b99-bd4mn" Sep 29 14:01:06 crc kubenswrapper[4634]: I0929 14:01:06.138719 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-7c4mq" Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.877829 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtjjt"] Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.879941 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.902373 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.902543 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.902547 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.903403 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-bjc9k" Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.919989 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtjjt"] Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.929692 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fae886-50be-4e2c-8fcc-89e6b86646e2-config\") pod \"dnsmasq-dns-675f4bcbfc-jtjjt\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:25 crc kubenswrapper[4634]: I0929 14:01:25.929913 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcthv\" (UniqueName: \"kubernetes.io/projected/73fae886-50be-4e2c-8fcc-89e6b86646e2-kube-api-access-zcthv\") pod \"dnsmasq-dns-675f4bcbfc-jtjjt\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.031752 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fae886-50be-4e2c-8fcc-89e6b86646e2-config\") pod \"dnsmasq-dns-675f4bcbfc-jtjjt\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.031857 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcthv\" (UniqueName: \"kubernetes.io/projected/73fae886-50be-4e2c-8fcc-89e6b86646e2-kube-api-access-zcthv\") pod \"dnsmasq-dns-675f4bcbfc-jtjjt\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.032787 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fae886-50be-4e2c-8fcc-89e6b86646e2-config\") pod \"dnsmasq-dns-675f4bcbfc-jtjjt\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.064307 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t8mqw"] Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.065523 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: W0929 14:01:26.068487 4634 reflector.go:561] object-"openstack"/"dns-svc": failed to list *v1.ConfigMap: configmaps "dns-svc" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Sep 29 14:01:26 crc kubenswrapper[4634]: E0929 14:01:26.068545 4634 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"dns-svc\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"dns-svc\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.077758 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcthv\" (UniqueName: \"kubernetes.io/projected/73fae886-50be-4e2c-8fcc-89e6b86646e2-kube-api-access-zcthv\") pod \"dnsmasq-dns-675f4bcbfc-jtjjt\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.129457 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t8mqw"] Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.133283 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.133425 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xct5g\" (UniqueName: \"kubernetes.io/projected/6588e5a3-1aea-4503-b54e-68cda952b226-kube-api-access-xct5g\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.133597 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-config\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.199512 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.235218 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-config\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.235286 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.235309 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xct5g\" (UniqueName: \"kubernetes.io/projected/6588e5a3-1aea-4503-b54e-68cda952b226-kube-api-access-xct5g\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.236401 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-config\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.274964 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xct5g\" (UniqueName: \"kubernetes.io/projected/6588e5a3-1aea-4503-b54e-68cda952b226-kube-api-access-xct5g\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:01:26 crc kubenswrapper[4634]: I0929 14:01:26.789933 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtjjt"] Sep 29 14:01:27 crc kubenswrapper[4634]: E0929 14:01:27.236026 4634 configmap.go:193] Couldn't get configMap openstack/dns-svc: failed to sync configmap cache: timed out waiting for the condition Sep 29 14:01:27 crc kubenswrapper[4634]: E0929 14:01:27.236191 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc podName:6588e5a3-1aea-4503-b54e-68cda952b226 nodeName:}" failed. No retries permitted until 2025-09-29 14:01:27.736162968 +0000 UTC m=+1018.304890727 (durationBeforeRetry 500ms). 
Sep 29 14:01:27 crc kubenswrapper[4634]: E0929 14:01:27.236191 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc podName:6588e5a3-1aea-4503-b54e-68cda952b226 nodeName:}" failed. No retries permitted until 2025-09-29 14:01:27.736162968 +0000 UTC m=+1018.304890727 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "dns-svc" (UniqueName: "kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc") pod "dnsmasq-dns-78dd6ddcc-t8mqw" (UID: "6588e5a3-1aea-4503-b54e-68cda952b226") : failed to sync configmap cache: timed out waiting for the condition
Sep 29 14:01:27 crc kubenswrapper[4634]: I0929 14:01:27.361731 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" event={"ID":"73fae886-50be-4e2c-8fcc-89e6b86646e2","Type":"ContainerStarted","Data":"5d040cc8d78ba850c95e21d08c4a83868db2f982574070cf7656e748aadd5184"}
Sep 29 14:01:27 crc kubenswrapper[4634]: I0929 14:01:27.548136 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Sep 29 14:01:27 crc kubenswrapper[4634]: I0929 14:01:27.766143 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw"
Sep 29 14:01:27 crc kubenswrapper[4634]: I0929 14:01:27.768472 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-t8mqw\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw"
Sep 29 14:01:27 crc kubenswrapper[4634]: I0929 14:01:27.931624 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw"
Sep 29 14:01:28 crc kubenswrapper[4634]: I0929 14:01:28.500080 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t8mqw"]
Sep 29 14:01:28 crc kubenswrapper[4634]: W0929 14:01:28.522531 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6588e5a3_1aea_4503_b54e_68cda952b226.slice/crio-80128d1ee78889825d37be86a15af28469500395a245ee5fb6d834bc8038ec75 WatchSource:0}: Error finding container 80128d1ee78889825d37be86a15af28469500395a245ee5fb6d834bc8038ec75: Status 404 returned error can't find the container with id 80128d1ee78889825d37be86a15af28469500395a245ee5fb6d834bc8038ec75
Sep 29 14:01:28 crc kubenswrapper[4634]: I0929 14:01:28.791707 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtjjt"]
Sep 29 14:01:28 crc kubenswrapper[4634]: I0929 14:01:28.835222 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jlpdn"]
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:28 crc kubenswrapper[4634]: I0929 14:01:28.853817 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jlpdn"] Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.000313 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.000379 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwnlv\" (UniqueName: \"kubernetes.io/projected/a5684b31-5b07-4e66-a682-b958a54a51a6-kube-api-access-dwnlv\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.000422 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-config\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.101975 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-config\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.102247 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.102289 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwnlv\" (UniqueName: \"kubernetes.io/projected/a5684b31-5b07-4e66-a682-b958a54a51a6-kube-api-access-dwnlv\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.103430 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-config\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.103622 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.173001 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwnlv\" (UniqueName: 
\"kubernetes.io/projected/a5684b31-5b07-4e66-a682-b958a54a51a6-kube-api-access-dwnlv\") pod \"dnsmasq-dns-5ccc8479f9-jlpdn\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.410337 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" event={"ID":"6588e5a3-1aea-4503-b54e-68cda952b226","Type":"ContainerStarted","Data":"80128d1ee78889825d37be86a15af28469500395a245ee5fb6d834bc8038ec75"} Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.414602 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t8mqw"] Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.465526 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.485276 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rhvml"] Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.486991 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.519114 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-config\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.519199 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8jqt\" (UniqueName: \"kubernetes.io/projected/578512a9-ebbb-4869-a3d6-b9a09298eeec-kube-api-access-j8jqt\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.519303 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.622123 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.622197 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-config\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.622236 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8jqt\" (UniqueName: \"kubernetes.io/projected/578512a9-ebbb-4869-a3d6-b9a09298eeec-kube-api-access-j8jqt\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.623344 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.623873 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-config\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.699139 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8jqt\" (UniqueName: \"kubernetes.io/projected/578512a9-ebbb-4869-a3d6-b9a09298eeec-kube-api-access-j8jqt\") pod \"dnsmasq-dns-57d769cc4f-rhvml\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.714958 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rhvml"] Sep 29 14:01:29 crc kubenswrapper[4634]: I0929 14:01:29.885180 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.053017 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.063728 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.066741 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.066998 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.068794 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dm6zl" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.068993 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.069174 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.069996 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.070348 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.102139 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.244938 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245022 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245044 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245101 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245123 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245142 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpnmv\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-kube-api-access-hpnmv\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245177 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245196 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245212 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245234 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.245253 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347046 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpnmv\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-kube-api-access-hpnmv\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347132 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347161 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347183 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347211 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347225 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347252 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347305 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347323 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347355 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.347375 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.348254 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.355272 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") 
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.355272 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.356586 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.356778 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.359574 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.369770 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.376006 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.379503 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.380927 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.387817 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.407238 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpnmv\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-kube-api-access-hpnmv\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0"
"MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.512760 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rhvml"] Sep 29 14:01:30 crc kubenswrapper[4634]: W0929 14:01:30.536814 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod578512a9_ebbb_4869_a3d6_b9a09298eeec.slice/crio-704c966cc9fe177c4cc2f3acaf464d750288878f28a6a14b894038c7ece3b935 WatchSource:0}: Error finding container 704c966cc9fe177c4cc2f3acaf464d750288878f28a6a14b894038c7ece3b935: Status 404 returned error can't find the container with id 704c966cc9fe177c4cc2f3acaf464d750288878f28a6a14b894038c7ece3b935 Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.551965 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jlpdn"] Sep 29 14:01:30 crc kubenswrapper[4634]: W0929 14:01:30.566711 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5684b31_5b07_4e66_a682_b958a54a51a6.slice/crio-69126df4c6d7741e59a20fd017da6ce48db1cdbf4cc0891f41a621bad2b4e1b6 WatchSource:0}: Error finding container 69126df4c6d7741e59a20fd017da6ce48db1cdbf4cc0891f41a621bad2b4e1b6: Status 404 returned error can't find the container with id 69126df4c6d7741e59a20fd017da6ce48db1cdbf4cc0891f41a621bad2b4e1b6 Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.701443 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.793616 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.804664 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.837958 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4mq6l" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.838317 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.840037 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.840916 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.841214 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.842080 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.847843 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.848252 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983193 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983249 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983290 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983311 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983340 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983369 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2skm\" (UniqueName: 
\"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-kube-api-access-c2skm\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983396 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-config-data\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983410 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983432 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983456 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:30 crc kubenswrapper[4634]: I0929 14:01:30.983477 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084543 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084630 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084657 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084692 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " 
pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084718 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084751 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084772 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084798 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084820 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2skm\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-kube-api-access-c2skm\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084844 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-config-data\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.084867 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.086179 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.086444 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.089330 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-config-data\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.089435 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-server-conf\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.089875 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.091023 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.097200 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.097704 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-pod-info\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.098241 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.102409 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.130876 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2skm\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-kube-api-access-c2skm\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.146936 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") " pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.400073 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.434430 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" event={"ID":"a5684b31-5b07-4e66-a682-b958a54a51a6","Type":"ContainerStarted","Data":"69126df4c6d7741e59a20fd017da6ce48db1cdbf4cc0891f41a621bad2b4e1b6"} Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.437455 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" event={"ID":"578512a9-ebbb-4869-a3d6-b9a09298eeec","Type":"ContainerStarted","Data":"704c966cc9fe177c4cc2f3acaf464d750288878f28a6a14b894038c7ece3b935"} Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.462385 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 14:01:31 crc kubenswrapper[4634]: I0929 14:01:31.928799 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.015907 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.019778 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.027785 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.028635 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.028896 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.029074 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.029215 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-8jl85" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.040658 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.062231 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108191 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108284 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxng4\" (UniqueName: \"kubernetes.io/projected/861151f8-60ad-449e-80fa-b1b64e5c5b3e-kube-api-access-bxng4\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108336 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-operator-scripts\") pod \"openstack-galera-0\" 
(UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108401 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108442 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/861151f8-60ad-449e-80fa-b1b64e5c5b3e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108469 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108495 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-kolla-config\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108559 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-config-data-default\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.108592 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-secrets\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.210435 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxng4\" (UniqueName: \"kubernetes.io/projected/861151f8-60ad-449e-80fa-b1b64e5c5b3e-kube-api-access-bxng4\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.210488 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.210530 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" 
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.210561 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/861151f8-60ad-449e-80fa-b1b64e5c5b3e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.210613 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.210670 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-kolla-config\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.210706 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-config-data-default\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.211040 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-secrets\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.211485 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.212002 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.217416 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/861151f8-60ad-449e-80fa-b1b64e5c5b3e-config-data-generated\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.220154 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-config-data-default\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0"
\"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-operator-scripts\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.224392 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/861151f8-60ad-449e-80fa-b1b64e5c5b3e-kolla-config\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.231898 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-secrets\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.234582 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.241497 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/861151f8-60ad-449e-80fa-b1b64e5c5b3e-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.248891 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxng4\" (UniqueName: \"kubernetes.io/projected/861151f8-60ad-449e-80fa-b1b64e5c5b3e-kube-api-access-bxng4\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.283554 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"861151f8-60ad-449e-80fa-b1b64e5c5b3e\") " pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.376762 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.478518 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8efec8a2-4905-4ba0-b777-d4e2cd393bd6","Type":"ContainerStarted","Data":"ed85ee231c08597128292d93a4b45120124c478c093809e98205b6e341b55dd0"} Sep 29 14:01:32 crc kubenswrapper[4634]: I0929 14:01:32.492701 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd3a9c91-300c-4510-b7a4-03cf8cbbe729","Type":"ContainerStarted","Data":"1abbf3385ea639118b9fbee2443f63b1ed522e2a47f46814ebb5383a5b15aa5f"} Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.195656 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.349047 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.362955 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.393487 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.393653 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.393872 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-c9zcf" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.393994 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.394155 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554117 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554187 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554245 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554291 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554311 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554336 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554344 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"861151f8-60ad-449e-80fa-b1b64e5c5b3e","Type":"ContainerStarted","Data":"d93a586e2cddfe4269070e28af6ca98c6dcb65c776d4b3c90a2a6ea576a21702"} Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554391 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5dcf49a2-dd23-4b67-9f54-4659168f4f18-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554428 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.554452 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29gvf\" (UniqueName: \"kubernetes.io/projected/5dcf49a2-dd23-4b67-9f54-4659168f4f18-kube-api-access-29gvf\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.658884 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.658973 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.658993 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.659015 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.659061 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5dcf49a2-dd23-4b67-9f54-4659168f4f18-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.659078 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-combined-ca-bundle\") 
pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.659325 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29gvf\" (UniqueName: \"kubernetes.io/projected/5dcf49a2-dd23-4b67-9f54-4659168f4f18-kube-api-access-29gvf\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.659373 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.659401 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.659981 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.660336 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5dcf49a2-dd23-4b67-9f54-4659168f4f18-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.660594 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.671256 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.681739 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.694787 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" 
Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.695188 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/5dcf49a2-dd23-4b67-9f54-4659168f4f18-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.695388 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dcf49a2-dd23-4b67-9f54-4659168f4f18-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.724882 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29gvf\" (UniqueName: \"kubernetes.io/projected/5dcf49a2-dd23-4b67-9f54-4659168f4f18-kube-api-access-29gvf\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.809718 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.810963 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.815259 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5dcf49a2-dd23-4b67-9f54-4659168f4f18\") " pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.817464 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.817640 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-jc9ms" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.817756 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.872073 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76a2a736-1945-4e7f-955e-e5c33004d4df-config-data\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.872148 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg7kv\" (UniqueName: \"kubernetes.io/projected/76a2a736-1945-4e7f-955e-e5c33004d4df-kube-api-access-kg7kv\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.872354 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76a2a736-1945-4e7f-955e-e5c33004d4df-combined-ca-bundle\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.872424 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/76a2a736-1945-4e7f-955e-e5c33004d4df-kolla-config\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.872453 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/76a2a736-1945-4e7f-955e-e5c33004d4df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.940420 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.974269 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/76a2a736-1945-4e7f-955e-e5c33004d4df-kolla-config\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.974328 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/76a2a736-1945-4e7f-955e-e5c33004d4df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.974370 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76a2a736-1945-4e7f-955e-e5c33004d4df-config-data\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.974404 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg7kv\" (UniqueName: \"kubernetes.io/projected/76a2a736-1945-4e7f-955e-e5c33004d4df-kube-api-access-kg7kv\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.974422 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76a2a736-1945-4e7f-955e-e5c33004d4df-combined-ca-bundle\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.975921 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/76a2a736-1945-4e7f-955e-e5c33004d4df-kolla-config\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.976540 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/76a2a736-1945-4e7f-955e-e5c33004d4df-config-data\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.979232 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/76a2a736-1945-4e7f-955e-e5c33004d4df-memcached-tls-certs\") pod \"memcached-0\" 
(UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:33 crc kubenswrapper[4634]: I0929 14:01:33.982339 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76a2a736-1945-4e7f-955e-e5c33004d4df-combined-ca-bundle\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:34 crc kubenswrapper[4634]: I0929 14:01:34.011718 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg7kv\" (UniqueName: \"kubernetes.io/projected/76a2a736-1945-4e7f-955e-e5c33004d4df-kube-api-access-kg7kv\") pod \"memcached-0\" (UID: \"76a2a736-1945-4e7f-955e-e5c33004d4df\") " pod="openstack/memcached-0" Sep 29 14:01:34 crc kubenswrapper[4634]: I0929 14:01:34.032687 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 14:01:34 crc kubenswrapper[4634]: I0929 14:01:34.149289 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 14:01:34 crc kubenswrapper[4634]: I0929 14:01:34.901211 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.013410 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.536007 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.537515 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.541038 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-lbsdh" Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.565372 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.621000 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9ffh\" (UniqueName: \"kubernetes.io/projected/02c51435-cae1-4758-a27a-6e461be7161b-kube-api-access-g9ffh\") pod \"kube-state-metrics-0\" (UID: \"02c51435-cae1-4758-a27a-6e461be7161b\") " pod="openstack/kube-state-metrics-0" Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.703466 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"76a2a736-1945-4e7f-955e-e5c33004d4df","Type":"ContainerStarted","Data":"0c50c77f60c4e450c44b19f9cbcc98f57b24f2fc39b10afa258bf2e67d4f026f"} Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.722920 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9ffh\" (UniqueName: \"kubernetes.io/projected/02c51435-cae1-4758-a27a-6e461be7161b-kube-api-access-g9ffh\") pod \"kube-state-metrics-0\" (UID: \"02c51435-cae1-4758-a27a-6e461be7161b\") " pod="openstack/kube-state-metrics-0" Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.726047 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5dcf49a2-dd23-4b67-9f54-4659168f4f18","Type":"ContainerStarted","Data":"7ceaa1b5c59924c4d265954a3da03fff73630229c9cbd598807939809b29f9fe"} Sep 29 14:01:35 crc kubenswrapper[4634]: 
I0929 14:01:35.750938 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9ffh\" (UniqueName: \"kubernetes.io/projected/02c51435-cae1-4758-a27a-6e461be7161b-kube-api-access-g9ffh\") pod \"kube-state-metrics-0\" (UID: \"02c51435-cae1-4758-a27a-6e461be7161b\") " pod="openstack/kube-state-metrics-0" Sep 29 14:01:35 crc kubenswrapper[4634]: I0929 14:01:35.891886 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 14:01:36 crc kubenswrapper[4634]: I0929 14:01:36.413029 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 14:01:36 crc kubenswrapper[4634]: I0929 14:01:36.778155 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"02c51435-cae1-4758-a27a-6e461be7161b","Type":"ContainerStarted","Data":"1e01abd187383088067c5df28335897b611ef8a77d3cdc5939ca90ddd3ad995e"} Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.622745 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lfvq4"] Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.630638 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.633750 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.634187 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.639361 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-s7gnm" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.649419 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lfvq4"] Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.660878 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-8xcvg"] Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.670994 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.723555 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-8xcvg"] Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776161 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a47ca0-1cd2-4e8d-92ce-37083cde3744-combined-ca-bundle\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776209 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/07a47ca0-1cd2-4e8d-92ce-37083cde3744-ovn-controller-tls-certs\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776238 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-etc-ovs\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776261 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-run\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776294 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a47ca0-1cd2-4e8d-92ce-37083cde3744-scripts\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776323 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkk4n\" (UniqueName: \"kubernetes.io/projected/07a47ca0-1cd2-4e8d-92ce-37083cde3744-kube-api-access-nkk4n\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776357 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-run\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776377 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-log\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776395 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: 
\"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-lib\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776415 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh26s\" (UniqueName: \"kubernetes.io/projected/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-kube-api-access-hh26s\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776432 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-run-ovn\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776456 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-scripts\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.776480 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-log-ovn\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.802545 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.804565 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.810400 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.810781 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.810911 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-w7tlz" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.810989 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.811386 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.823708 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881121 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh26s\" (UniqueName: \"kubernetes.io/projected/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-kube-api-access-hh26s\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881171 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-run-ovn\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881216 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-scripts\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881251 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-log-ovn\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881282 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d077753a-f890-4c33-9d24-d96f3b6117f3-config\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881319 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881363 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881646 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a47ca0-1cd2-4e8d-92ce-37083cde3744-combined-ca-bundle\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881676 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/07a47ca0-1cd2-4e8d-92ce-37083cde3744-ovn-controller-tls-certs\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881706 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-etc-ovs\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881735 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-run\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881765 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d077753a-f890-4c33-9d24-d96f3b6117f3-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.881978 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d077753a-f890-4c33-9d24-d96f3b6117f3-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882013 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882072 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a47ca0-1cd2-4e8d-92ce-37083cde3744-scripts\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882131 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " 
pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882183 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl6cp\" (UniqueName: \"kubernetes.io/projected/d077753a-f890-4c33-9d24-d96f3b6117f3-kube-api-access-bl6cp\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882288 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkk4n\" (UniqueName: \"kubernetes.io/projected/07a47ca0-1cd2-4e8d-92ce-37083cde3744-kube-api-access-nkk4n\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882418 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-run\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882464 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-log\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.882492 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-lib\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.887893 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a47ca0-1cd2-4e8d-92ce-37083cde3744-scripts\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.891455 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-lib\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.892327 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-run\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.892454 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-log\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.892646 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-log-ovn\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.892855 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-var-run\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.892881 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-etc-ovs\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.894050 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/07a47ca0-1cd2-4e8d-92ce-37083cde3744-var-run-ovn\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.894935 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-scripts\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.921035 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a47ca0-1cd2-4e8d-92ce-37083cde3744-combined-ca-bundle\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.926765 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh26s\" (UniqueName: \"kubernetes.io/projected/b8867c3c-d76b-4687-a044-15ba4e9b2dc2-kube-api-access-hh26s\") pod \"ovn-controller-ovs-8xcvg\" (UID: \"b8867c3c-d76b-4687-a044-15ba4e9b2dc2\") " pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.932503 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkk4n\" (UniqueName: \"kubernetes.io/projected/07a47ca0-1cd2-4e8d-92ce-37083cde3744-kube-api-access-nkk4n\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.946951 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/07a47ca0-1cd2-4e8d-92ce-37083cde3744-ovn-controller-tls-certs\") pod \"ovn-controller-lfvq4\" (UID: \"07a47ca0-1cd2-4e8d-92ce-37083cde3744\") " pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.977047 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lfvq4" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984052 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d077753a-f890-4c33-9d24-d96f3b6117f3-config\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984297 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984340 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984380 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d077753a-f890-4c33-9d24-d96f3b6117f3-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984401 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d077753a-f890-4c33-9d24-d96f3b6117f3-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984424 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984456 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984953 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.985773 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d077753a-f890-4c33-9d24-d96f3b6117f3-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.986644 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/d077753a-f890-4c33-9d24-d96f3b6117f3-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.984476 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl6cp\" (UniqueName: \"kubernetes.io/projected/d077753a-f890-4c33-9d24-d96f3b6117f3-kube-api-access-bl6cp\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.988998 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d077753a-f890-4c33-9d24-d96f3b6117f3-config\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.989862 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:39 crc kubenswrapper[4634]: I0929 14:01:39.991833 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:40 crc kubenswrapper[4634]: I0929 14:01:40.003628 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d077753a-f890-4c33-9d24-d96f3b6117f3-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:40 crc kubenswrapper[4634]: I0929 14:01:40.010011 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:01:40 crc kubenswrapper[4634]: I0929 14:01:40.025520 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl6cp\" (UniqueName: \"kubernetes.io/projected/d077753a-f890-4c33-9d24-d96f3b6117f3-kube-api-access-bl6cp\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:40 crc kubenswrapper[4634]: I0929 14:01:40.033664 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"d077753a-f890-4c33-9d24-d96f3b6117f3\") " pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:40 crc kubenswrapper[4634]: I0929 14:01:40.149143 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 14:01:41 crc kubenswrapper[4634]: I0929 14:01:41.391032 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lfvq4"] Sep 29 14:01:41 crc kubenswrapper[4634]: I0929 14:01:41.789125 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.759972 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.774288 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.781119 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-6lqj6" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.781760 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.782524 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.783636 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.804317 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.876061 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.876347 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.876540 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.876597 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8ff4fb7c-c525-4c15-941e-4b8980a5b140-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.876679 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ff4fb7c-c525-4c15-941e-4b8980a5b140-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.876852 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvtl2\" (UniqueName: \"kubernetes.io/projected/8ff4fb7c-c525-4c15-941e-4b8980a5b140-kube-api-access-xvtl2\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.877100 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.877161 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ff4fb7c-c525-4c15-941e-4b8980a5b140-config\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.978985 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979097 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979156 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979193 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8ff4fb7c-c525-4c15-941e-4b8980a5b140-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979216 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ff4fb7c-c525-4c15-941e-4b8980a5b140-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979243 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvtl2\" (UniqueName: \"kubernetes.io/projected/8ff4fb7c-c525-4c15-941e-4b8980a5b140-kube-api-access-xvtl2\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979293 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" 
(UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979321 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ff4fb7c-c525-4c15-941e-4b8980a5b140-config\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.979923 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.980006 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8ff4fb7c-c525-4c15-941e-4b8980a5b140-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.980434 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ff4fb7c-c525-4c15-941e-4b8980a5b140-config\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.980967 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8ff4fb7c-c525-4c15-941e-4b8980a5b140-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.989935 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.989947 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.990589 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ff4fb7c-c525-4c15-941e-4b8980a5b140-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:42 crc kubenswrapper[4634]: I0929 14:01:42.997500 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvtl2\" (UniqueName: \"kubernetes.io/projected/8ff4fb7c-c525-4c15-941e-4b8980a5b140-kube-api-access-xvtl2\") pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:43 crc kubenswrapper[4634]: I0929 14:01:43.011834 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") 
pod \"ovsdbserver-sb-0\" (UID: \"8ff4fb7c-c525-4c15-941e-4b8980a5b140\") " pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:43 crc kubenswrapper[4634]: I0929 14:01:43.111168 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 14:01:46 crc kubenswrapper[4634]: W0929 14:01:46.276297 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07a47ca0_1cd2_4e8d_92ce_37083cde3744.slice/crio-bf73c727214bf984b3f325599ff1a6ae6d9f48b5c6fc6610cc30179c93ef3bfc WatchSource:0}: Error finding container bf73c727214bf984b3f325599ff1a6ae6d9f48b5c6fc6610cc30179c93ef3bfc: Status 404 returned error can't find the container with id bf73c727214bf984b3f325599ff1a6ae6d9f48b5c6fc6610cc30179c93ef3bfc Sep 29 14:01:46 crc kubenswrapper[4634]: W0929 14:01:46.279209 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd077753a_f890_4c33_9d24_d96f3b6117f3.slice/crio-c54cabb45e1453008442eacfddb16106e218032ea3232c6c4c3115a931985db7 WatchSource:0}: Error finding container c54cabb45e1453008442eacfddb16106e218032ea3232c6c4c3115a931985db7: Status 404 returned error can't find the container with id c54cabb45e1453008442eacfddb16106e218032ea3232c6c4c3115a931985db7 Sep 29 14:01:47 crc kubenswrapper[4634]: I0929 14:01:47.036784 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d077753a-f890-4c33-9d24-d96f3b6117f3","Type":"ContainerStarted","Data":"c54cabb45e1453008442eacfddb16106e218032ea3232c6c4c3115a931985db7"} Sep 29 14:01:47 crc kubenswrapper[4634]: I0929 14:01:47.039232 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4" event={"ID":"07a47ca0-1cd2-4e8d-92ce-37083cde3744","Type":"ContainerStarted","Data":"bf73c727214bf984b3f325599ff1a6ae6d9f48b5c6fc6610cc30179c93ef3bfc"} Sep 29 14:01:51 crc kubenswrapper[4634]: I0929 14:01:51.204018 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-8xcvg"] Sep 29 14:01:54 crc kubenswrapper[4634]: W0929 14:01:54.346771 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8867c3c_d76b_4687_a044_15ba4e9b2dc2.slice/crio-3b152a03f290976e36cf95494fb435e91b9b0cb206183417dbf0bfcd702cdfe0 WatchSource:0}: Error finding container 3b152a03f290976e36cf95494fb435e91b9b0cb206183417dbf0bfcd702cdfe0: Status 404 returned error can't find the container with id 3b152a03f290976e36cf95494fb435e91b9b0cb206183417dbf0bfcd702cdfe0 Sep 29 14:01:55 crc kubenswrapper[4634]: I0929 14:01:55.130118 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8xcvg" event={"ID":"b8867c3c-d76b-4687-a044-15ba4e9b2dc2","Type":"ContainerStarted","Data":"3b152a03f290976e36cf95494fb435e91b9b0cb206183417dbf0bfcd702cdfe0"} Sep 29 14:01:55 crc kubenswrapper[4634]: E0929 14:01:55.251527 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Sep 29 14:01:55 crc kubenswrapper[4634]: E0929 14:01:55.252136 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- 
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n55dh85h5c8h5bch5bch58bh58ch6h5dch5d5hfch67chd4h98h5d5h59fh84h5h58fh74hfch5b9h64ch5cfh5b6h587h65hf5h696h657h546h684q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kg7kv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(76a2a736-1945-4e7f-955e-e5c33004d4df): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:01:55 crc kubenswrapper[4634]: E0929 14:01:55.253483 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="76a2a736-1945-4e7f-955e-e5c33004d4df" Sep 29 14:01:55 crc kubenswrapper[4634]: E0929 14:01:55.281850 4634 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Sep 29 14:01:55 crc kubenswrapper[4634]: E0929 14:01:55.282135 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:DB_ROOT_PASSWORD,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:DbRootPassword,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secrets,ReadOnly:true,MountPath:/var/lib/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-29gvf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(5dcf49a2-dd23-4b67-9f54-4659168f4f18): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:01:55 crc kubenswrapper[4634]: E0929 14:01:55.283423 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="5dcf49a2-dd23-4b67-9f54-4659168f4f18" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.141838 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="5dcf49a2-dd23-4b67-9f54-4659168f4f18" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.141972 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="76a2a736-1945-4e7f-955e-e5c33004d4df" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.471689 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.472130 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hpnmv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(8efec8a2-4905-4ba0-b777-d4e2cd393bd6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.473509 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.494304 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.494508 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: 
{{524288000 0} {} 500Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c2skm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(dd3a9c91-300c-4510-b7a4-03cf8cbbe729): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:01:56 crc kubenswrapper[4634]: E0929 14:01:56.495762 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" Sep 29 14:01:57 crc kubenswrapper[4634]: E0929 14:01:57.157370 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" Sep 29 14:01:57 crc kubenswrapper[4634]: E0929 14:01:57.164480 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.258673 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-7bt9h"] Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.260451 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.266555 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.280852 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-7bt9h"] Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.294574 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-ovn-rundir\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.294618 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-combined-ca-bundle\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.294644 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-ovs-rundir\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.294666 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6s4g\" (UniqueName: \"kubernetes.io/projected/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-kube-api-access-k6s4g\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.294702 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-config\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.294748 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.396392 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.396774 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-ovn-rundir\") pod \"ovn-controller-metrics-7bt9h\" 
(UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.396871 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-combined-ca-bundle\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.396963 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-ovs-rundir\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.397047 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6s4g\" (UniqueName: \"kubernetes.io/projected/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-kube-api-access-k6s4g\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.397186 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-config\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.398489 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-ovn-rundir\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.399444 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-config\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.399732 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-ovs-rundir\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.411770 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-combined-ca-bundle\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.419220 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 
crc kubenswrapper[4634]: I0929 14:02:03.435689 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jlpdn"] Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.451250 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6s4g\" (UniqueName: \"kubernetes.io/projected/5a821c1a-6f5a-47af-bbe6-072b2a2a8033-kube-api-access-k6s4g\") pod \"ovn-controller-metrics-7bt9h\" (UID: \"5a821c1a-6f5a-47af-bbe6-072b2a2a8033\") " pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.490136 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-gbn6f"] Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.491835 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.496539 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.501035 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdm6g\" (UniqueName: \"kubernetes.io/projected/477c0d21-a4fb-48d7-abc3-2078580d9c14-kube-api-access-jdm6g\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.501347 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-config\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.501444 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.501526 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.511653 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-gbn6f"] Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.597273 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-7bt9h" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.602655 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdm6g\" (UniqueName: \"kubernetes.io/projected/477c0d21-a4fb-48d7-abc3-2078580d9c14-kube-api-access-jdm6g\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.602709 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-config\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.602733 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.602756 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.603758 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.603784 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.604765 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-config\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.624232 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdm6g\" (UniqueName: \"kubernetes.io/projected/477c0d21-a4fb-48d7-abc3-2078580d9c14-kube-api-access-jdm6g\") pod \"dnsmasq-dns-7fd796d7df-gbn6f\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:03 crc kubenswrapper[4634]: I0929 14:02:03.823183 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:06 crc kubenswrapper[4634]: I0929 14:02:06.175490 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.583555 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.583737 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zcthv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-jtjjt_openstack(73fae886-50be-4e2c-8fcc-89e6b86646e2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.585135 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" podUID="73fae886-50be-4e2c-8fcc-89e6b86646e2" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.924227 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.924565 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dwnlv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-jlpdn_openstack(a5684b31-5b07-4e66-a682-b958a54a51a6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.924567 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.924992 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j8jqt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-rhvml_openstack(578512a9-ebbb-4869-a3d6-b9a09298eeec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.925733 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" podUID="a5684b31-5b07-4e66-a682-b958a54a51a6" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.926399 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" podUID="578512a9-ebbb-4869-a3d6-b9a09298eeec" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.943466 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.943649 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xct5g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-t8mqw_openstack(6588e5a3-1aea-4503-b54e-68cda952b226): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:02:06 crc kubenswrapper[4634]: E0929 14:02:06.944877 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" podUID="6588e5a3-1aea-4503-b54e-68cda952b226" Sep 29 14:02:07 crc kubenswrapper[4634]: E0929 14:02:07.293351 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" podUID="578512a9-ebbb-4869-a3d6-b9a09298eeec" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.306388 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8ff4fb7c-c525-4c15-941e-4b8980a5b140","Type":"ContainerStarted","Data":"c566b00f7c39ecabb5e7de1fd6cbf9fb93698592c488d03e12d4d9e7e8850909"} Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.308701 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" event={"ID":"73fae886-50be-4e2c-8fcc-89e6b86646e2","Type":"ContainerDied","Data":"5d040cc8d78ba850c95e21d08c4a83868db2f982574070cf7656e748aadd5184"} Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.308773 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d040cc8d78ba850c95e21d08c4a83868db2f982574070cf7656e748aadd5184" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.311485 4634 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" event={"ID":"a5684b31-5b07-4e66-a682-b958a54a51a6","Type":"ContainerDied","Data":"69126df4c6d7741e59a20fd017da6ce48db1cdbf4cc0891f41a621bad2b4e1b6"} Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.311531 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69126df4c6d7741e59a20fd017da6ce48db1cdbf4cc0891f41a621bad2b4e1b6" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.312603 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" event={"ID":"6588e5a3-1aea-4503-b54e-68cda952b226","Type":"ContainerDied","Data":"80128d1ee78889825d37be86a15af28469500395a245ee5fb6d834bc8038ec75"} Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.312671 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80128d1ee78889825d37be86a15af28469500395a245ee5fb6d834bc8038ec75" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.361787 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.380979 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.393916 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.470898 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-config\") pod \"a5684b31-5b07-4e66-a682-b958a54a51a6\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.471357 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fae886-50be-4e2c-8fcc-89e6b86646e2-config\") pod \"73fae886-50be-4e2c-8fcc-89e6b86646e2\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.471447 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-dns-svc\") pod \"a5684b31-5b07-4e66-a682-b958a54a51a6\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.471513 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcthv\" (UniqueName: \"kubernetes.io/projected/73fae886-50be-4e2c-8fcc-89e6b86646e2-kube-api-access-zcthv\") pod \"73fae886-50be-4e2c-8fcc-89e6b86646e2\" (UID: \"73fae886-50be-4e2c-8fcc-89e6b86646e2\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.471535 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwnlv\" (UniqueName: \"kubernetes.io/projected/a5684b31-5b07-4e66-a682-b958a54a51a6-kube-api-access-dwnlv\") pod \"a5684b31-5b07-4e66-a682-b958a54a51a6\" (UID: \"a5684b31-5b07-4e66-a682-b958a54a51a6\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.471802 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73fae886-50be-4e2c-8fcc-89e6b86646e2-config" (OuterVolumeSpecName: "config") pod 
"73fae886-50be-4e2c-8fcc-89e6b86646e2" (UID: "73fae886-50be-4e2c-8fcc-89e6b86646e2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.472037 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73fae886-50be-4e2c-8fcc-89e6b86646e2-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.472666 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a5684b31-5b07-4e66-a682-b958a54a51a6" (UID: "a5684b31-5b07-4e66-a682-b958a54a51a6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.473301 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-config" (OuterVolumeSpecName: "config") pod "a5684b31-5b07-4e66-a682-b958a54a51a6" (UID: "a5684b31-5b07-4e66-a682-b958a54a51a6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.479629 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73fae886-50be-4e2c-8fcc-89e6b86646e2-kube-api-access-zcthv" (OuterVolumeSpecName: "kube-api-access-zcthv") pod "73fae886-50be-4e2c-8fcc-89e6b86646e2" (UID: "73fae886-50be-4e2c-8fcc-89e6b86646e2"). InnerVolumeSpecName "kube-api-access-zcthv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.479981 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5684b31-5b07-4e66-a682-b958a54a51a6-kube-api-access-dwnlv" (OuterVolumeSpecName: "kube-api-access-dwnlv") pod "a5684b31-5b07-4e66-a682-b958a54a51a6" (UID: "a5684b31-5b07-4e66-a682-b958a54a51a6"). InnerVolumeSpecName "kube-api-access-dwnlv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573054 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-config\") pod \"6588e5a3-1aea-4503-b54e-68cda952b226\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573161 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc\") pod \"6588e5a3-1aea-4503-b54e-68cda952b226\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573234 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xct5g\" (UniqueName: \"kubernetes.io/projected/6588e5a3-1aea-4503-b54e-68cda952b226-kube-api-access-xct5g\") pod \"6588e5a3-1aea-4503-b54e-68cda952b226\" (UID: \"6588e5a3-1aea-4503-b54e-68cda952b226\") " Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573686 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573701 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcthv\" (UniqueName: \"kubernetes.io/projected/73fae886-50be-4e2c-8fcc-89e6b86646e2-kube-api-access-zcthv\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573713 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwnlv\" (UniqueName: \"kubernetes.io/projected/a5684b31-5b07-4e66-a682-b958a54a51a6-kube-api-access-dwnlv\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573721 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5684b31-5b07-4e66-a682-b958a54a51a6-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.573843 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-config" (OuterVolumeSpecName: "config") pod "6588e5a3-1aea-4503-b54e-68cda952b226" (UID: "6588e5a3-1aea-4503-b54e-68cda952b226"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.574142 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6588e5a3-1aea-4503-b54e-68cda952b226" (UID: "6588e5a3-1aea-4503-b54e-68cda952b226"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.578412 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6588e5a3-1aea-4503-b54e-68cda952b226-kube-api-access-xct5g" (OuterVolumeSpecName: "kube-api-access-xct5g") pod "6588e5a3-1aea-4503-b54e-68cda952b226" (UID: "6588e5a3-1aea-4503-b54e-68cda952b226"). InnerVolumeSpecName "kube-api-access-xct5g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.676074 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.676132 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6588e5a3-1aea-4503-b54e-68cda952b226-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.676147 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xct5g\" (UniqueName: \"kubernetes.io/projected/6588e5a3-1aea-4503-b54e-68cda952b226-kube-api-access-xct5g\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.892885 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-7bt9h"] Sep 29 14:02:08 crc kubenswrapper[4634]: I0929 14:02:08.965246 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-gbn6f"] Sep 29 14:02:09 crc kubenswrapper[4634]: E0929 14:02:09.247799 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Sep 29 14:02:09 crc kubenswrapper[4634]: E0929 14:02:09.248320 4634 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Sep 29 14:02:09 crc kubenswrapper[4634]: E0929 14:02:09.248498 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g9ffh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(02c51435-cae1-4758-a27a-6e461be7161b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 14:02:09 crc kubenswrapper[4634]: E0929 14:02:09.249840 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="02c51435-cae1-4758-a27a-6e461be7161b" Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.353352 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-7bt9h" event={"ID":"5a821c1a-6f5a-47af-bbe6-072b2a2a8033","Type":"ContainerStarted","Data":"f832e3fff3c2e00eccce50fc149a80840b3ba4ef939b9adb53058a1edd21c168"} Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.357398 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" event={"ID":"477c0d21-a4fb-48d7-abc3-2078580d9c14","Type":"ContainerStarted","Data":"126ad22f5d61e13287ca3e0f9724fb4e17bc9ed9c5e41aabf68365115c205638"} Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.357475 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-jtjjt" Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.357435 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-jlpdn" Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.357441 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-t8mqw" Sep 29 14:02:09 crc kubenswrapper[4634]: E0929 14:02:09.486605 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="02c51435-cae1-4758-a27a-6e461be7161b" Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.526297 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtjjt"] Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.548641 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-jtjjt"] Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.593718 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t8mqw"] Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.616444 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-t8mqw"] Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.645606 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jlpdn"] Sep 29 14:02:09 crc kubenswrapper[4634]: I0929 14:02:09.652700 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-jlpdn"] Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.127708 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6588e5a3-1aea-4503-b54e-68cda952b226" path="/var/lib/kubelet/pods/6588e5a3-1aea-4503-b54e-68cda952b226/volumes" Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.128688 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73fae886-50be-4e2c-8fcc-89e6b86646e2" path="/var/lib/kubelet/pods/73fae886-50be-4e2c-8fcc-89e6b86646e2/volumes" Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.129162 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5684b31-5b07-4e66-a682-b958a54a51a6" path="/var/lib/kubelet/pods/a5684b31-5b07-4e66-a682-b958a54a51a6/volumes" Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.377448 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5dcf49a2-dd23-4b67-9f54-4659168f4f18","Type":"ContainerStarted","Data":"d7e59ebdb70122ba1ffb2f0e5b8c655f8f18a87ea1904589c7bd04f5c9fb598b"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.385974 4634 generic.go:334] "Generic (PLEG): container finished" podID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerID="f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf" exitCode=0 Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.386045 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" event={"ID":"477c0d21-a4fb-48d7-abc3-2078580d9c14","Type":"ContainerDied","Data":"f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.391359 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"861151f8-60ad-449e-80fa-b1b64e5c5b3e","Type":"ContainerStarted","Data":"b72411ea58c840db8ef9c97800ec06e505ad156abbd12464d4bd54246504d7ca"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.407613 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"d077753a-f890-4c33-9d24-d96f3b6117f3","Type":"ContainerStarted","Data":"e48bf48c6684e8e8b43c3d658b8ac18571edb46c58b37978958d6eaba0b4c31e"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.429501 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd3a9c91-300c-4510-b7a4-03cf8cbbe729","Type":"ContainerStarted","Data":"a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.433468 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"76a2a736-1945-4e7f-955e-e5c33004d4df","Type":"ContainerStarted","Data":"ffc79358406bf0f197421593405d6abbb86b7693904b79a0c3aa16856a4a9bb3"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.433676 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.443024 4634 generic.go:334] "Generic (PLEG): container finished" podID="b8867c3c-d76b-4687-a044-15ba4e9b2dc2" containerID="df718a194f791fc9cfbc225ab71f5b6ffb7d0bc3848c85a668f75ee268545840" exitCode=0 Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.443169 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8xcvg" event={"ID":"b8867c3c-d76b-4687-a044-15ba4e9b2dc2","Type":"ContainerDied","Data":"df718a194f791fc9cfbc225ab71f5b6ffb7d0bc3848c85a668f75ee268545840"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.461312 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4" event={"ID":"07a47ca0-1cd2-4e8d-92ce-37083cde3744","Type":"ContainerStarted","Data":"8aef53ea208b3c0522ff473349eccfe6bc3a202a265026c100d75b5bea6b7350"} Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.461851 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-lfvq4" Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.533556 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.09829417 podStartE2EDuration="37.53353507s" podCreationTimestamp="2025-09-29 14:01:33 +0000 UTC" firstStartedPulling="2025-09-29 14:01:35.084655556 +0000 UTC m=+1025.653383305" lastFinishedPulling="2025-09-29 14:02:09.519896456 +0000 UTC m=+1060.088624205" observedRunningTime="2025-09-29 14:02:10.520991967 +0000 UTC m=+1061.089719716" watchObservedRunningTime="2025-09-29 14:02:10.53353507 +0000 UTC m=+1061.102262819" Sep 29 14:02:10 crc kubenswrapper[4634]: I0929 14:02:10.548754 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-lfvq4" podStartSLOduration=8.671774379 podStartE2EDuration="31.548727095s" podCreationTimestamp="2025-09-29 14:01:39 +0000 UTC" firstStartedPulling="2025-09-29 14:01:46.281028535 +0000 UTC m=+1036.849756284" lastFinishedPulling="2025-09-29 14:02:09.157981251 +0000 UTC m=+1059.726709000" observedRunningTime="2025-09-29 14:02:10.537738384 +0000 UTC m=+1061.106466133" watchObservedRunningTime="2025-09-29 14:02:10.548727095 +0000 UTC m=+1061.117454844" Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.473695 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8xcvg" event={"ID":"b8867c3c-d76b-4687-a044-15ba4e9b2dc2","Type":"ContainerStarted","Data":"0f6c172ccb487577ce868a91053d29a4f24e5f4c1d7b6aeca43b6bd2a775cdf9"} Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.474159 
4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-8xcvg" event={"ID":"b8867c3c-d76b-4687-a044-15ba4e9b2dc2","Type":"ContainerStarted","Data":"daead22ffad26889494b4fe1ce75de0033d84b1764438963b176236703cf12ae"} Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.474569 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.474600 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-8xcvg" Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.476771 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" event={"ID":"477c0d21-a4fb-48d7-abc3-2078580d9c14","Type":"ContainerStarted","Data":"69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee"} Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.476912 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.479439 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8ff4fb7c-c525-4c15-941e-4b8980a5b140","Type":"ContainerStarted","Data":"27d4c038cf9d71024465f98297702f38209c128b60ea56fd674896af9bedebe6"} Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.499782 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-8xcvg" podStartSLOduration=18.576017246 podStartE2EDuration="32.499759468s" podCreationTimestamp="2025-09-29 14:01:39 +0000 UTC" firstStartedPulling="2025-09-29 14:01:54.351637505 +0000 UTC m=+1044.920365264" lastFinishedPulling="2025-09-29 14:02:08.275379737 +0000 UTC m=+1058.844107486" observedRunningTime="2025-09-29 14:02:11.492213872 +0000 UTC m=+1062.060941621" watchObservedRunningTime="2025-09-29 14:02:11.499759468 +0000 UTC m=+1062.068487207" Sep 29 14:02:11 crc kubenswrapper[4634]: I0929 14:02:11.521919 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" podStartSLOduration=7.952464441 podStartE2EDuration="8.521898193s" podCreationTimestamp="2025-09-29 14:02:03 +0000 UTC" firstStartedPulling="2025-09-29 14:02:09.178337638 +0000 UTC m=+1059.747065387" lastFinishedPulling="2025-09-29 14:02:09.74777139 +0000 UTC m=+1060.316499139" observedRunningTime="2025-09-29 14:02:11.517068171 +0000 UTC m=+1062.085795930" watchObservedRunningTime="2025-09-29 14:02:11.521898193 +0000 UTC m=+1062.090625942" Sep 29 14:02:13 crc kubenswrapper[4634]: I0929 14:02:13.515052 4634 generic.go:334] "Generic (PLEG): container finished" podID="5dcf49a2-dd23-4b67-9f54-4659168f4f18" containerID="d7e59ebdb70122ba1ffb2f0e5b8c655f8f18a87ea1904589c7bd04f5c9fb598b" exitCode=0 Sep 29 14:02:13 crc kubenswrapper[4634]: I0929 14:02:13.515922 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5dcf49a2-dd23-4b67-9f54-4659168f4f18","Type":"ContainerDied","Data":"d7e59ebdb70122ba1ffb2f0e5b8c655f8f18a87ea1904589c7bd04f5c9fb598b"} Sep 29 14:02:13 crc kubenswrapper[4634]: I0929 14:02:13.520108 4634 generic.go:334] "Generic (PLEG): container finished" podID="861151f8-60ad-449e-80fa-b1b64e5c5b3e" containerID="b72411ea58c840db8ef9c97800ec06e505ad156abbd12464d4bd54246504d7ca" exitCode=0 Sep 29 14:02:13 crc kubenswrapper[4634]: I0929 14:02:13.520144 4634 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"861151f8-60ad-449e-80fa-b1b64e5c5b3e","Type":"ContainerDied","Data":"b72411ea58c840db8ef9c97800ec06e505ad156abbd12464d4bd54246504d7ca"} Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.151351 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.529590 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"d077753a-f890-4c33-9d24-d96f3b6117f3","Type":"ContainerStarted","Data":"ad027073149e6bdbaaa92c9a04f596ee10e5daee3e56c4c1f7a33969e5b4ad05"} Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.532372 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-7bt9h" event={"ID":"5a821c1a-6f5a-47af-bbe6-072b2a2a8033","Type":"ContainerStarted","Data":"782497cba17b00a2d32eb089956cd1bdd4543cfaf7176ac242a583b7a483ca2e"} Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.535382 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"5dcf49a2-dd23-4b67-9f54-4659168f4f18","Type":"ContainerStarted","Data":"37e105ac9ac626a6bddafa1bd5679db24417248a889cc4ba8a9babba3d6bc445"} Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.537750 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"8ff4fb7c-c525-4c15-941e-4b8980a5b140","Type":"ContainerStarted","Data":"94becbfb2a7cc7ed79855591ec18e836d992dfb1fe12c05a08baf7af40821b80"} Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.540596 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"861151f8-60ad-449e-80fa-b1b64e5c5b3e","Type":"ContainerStarted","Data":"3dc0a59f2a2fed324a3d51e2bf83b59c5e1da9b7d1d7e22a479be7b8fa082528"} Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.542376 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8efec8a2-4905-4ba0-b777-d4e2cd393bd6","Type":"ContainerStarted","Data":"0617af5024010462e3ff97d997662259c347192a23b50e962760fa8e5e1b4604"} Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.553872 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=9.218721475 podStartE2EDuration="36.553854429s" podCreationTimestamp="2025-09-29 14:01:38 +0000 UTC" firstStartedPulling="2025-09-29 14:01:46.280864721 +0000 UTC m=+1036.849592510" lastFinishedPulling="2025-09-29 14:02:13.615997685 +0000 UTC m=+1064.184725464" observedRunningTime="2025-09-29 14:02:14.552962394 +0000 UTC m=+1065.121690143" watchObservedRunningTime="2025-09-29 14:02:14.553854429 +0000 UTC m=+1065.122582178" Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.585359 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=12.267236454 podStartE2EDuration="44.585339959s" podCreationTimestamp="2025-09-29 14:01:30 +0000 UTC" firstStartedPulling="2025-09-29 14:01:33.322964067 +0000 UTC m=+1023.891691816" lastFinishedPulling="2025-09-29 14:02:05.641067572 +0000 UTC m=+1056.209795321" observedRunningTime="2025-09-29 14:02:14.582780859 +0000 UTC m=+1065.151508608" watchObservedRunningTime="2025-09-29 14:02:14.585339959 +0000 UTC m=+1065.154067708" Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.607563 4634 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371994.247236 podStartE2EDuration="42.607538975s" podCreationTimestamp="2025-09-29 14:01:32 +0000 UTC" firstStartedPulling="2025-09-29 14:01:34.992598664 +0000 UTC m=+1025.561326413" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:02:14.606354092 +0000 UTC m=+1065.175081861" watchObservedRunningTime="2025-09-29 14:02:14.607538975 +0000 UTC m=+1065.176266714" Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.628881 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-7bt9h" podStartSLOduration=7.201833951 podStartE2EDuration="11.628862407s" podCreationTimestamp="2025-09-29 14:02:03 +0000 UTC" firstStartedPulling="2025-09-29 14:02:09.159804942 +0000 UTC m=+1059.728532691" lastFinishedPulling="2025-09-29 14:02:13.586833408 +0000 UTC m=+1064.155561147" observedRunningTime="2025-09-29 14:02:14.624475257 +0000 UTC m=+1065.193202996" watchObservedRunningTime="2025-09-29 14:02:14.628862407 +0000 UTC m=+1065.197590156" Sep 29 14:02:14 crc kubenswrapper[4634]: I0929 14:02:14.696609 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=28.155748281 podStartE2EDuration="33.696583077s" podCreationTimestamp="2025-09-29 14:01:41 +0000 UTC" firstStartedPulling="2025-09-29 14:02:08.04482641 +0000 UTC m=+1058.613554159" lastFinishedPulling="2025-09-29 14:02:13.585661206 +0000 UTC m=+1064.154388955" observedRunningTime="2025-09-29 14:02:14.689469832 +0000 UTC m=+1065.258197581" watchObservedRunningTime="2025-09-29 14:02:14.696583077 +0000 UTC m=+1065.265310826" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.072221 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rhvml"] Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.129497 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-twmhj"] Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.130972 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.135268 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.145957 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-twmhj"] Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.153053 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.211848 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.211898 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.211991 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgv64\" (UniqueName: \"kubernetes.io/projected/4850732f-63f0-44a8-868b-ab1746b63f43-kube-api-access-fgv64\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.212075 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.212102 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-config\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.318909 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.319529 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-config\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.319824 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.319922 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.320014 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgv64\" (UniqueName: \"kubernetes.io/projected/4850732f-63f0-44a8-868b-ab1746b63f43-kube-api-access-fgv64\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.321777 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.321793 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.322279 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-config\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.324847 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.345958 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgv64\" (UniqueName: \"kubernetes.io/projected/4850732f-63f0-44a8-868b-ab1746b63f43-kube-api-access-fgv64\") pod \"dnsmasq-dns-86db49b7ff-twmhj\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") " pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.471688 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.554651 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" event={"ID":"578512a9-ebbb-4869-a3d6-b9a09298eeec","Type":"ContainerDied","Data":"704c966cc9fe177c4cc2f3acaf464d750288878f28a6a14b894038c7ece3b935"} Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.554717 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="704c966cc9fe177c4cc2f3acaf464d750288878f28a6a14b894038c7ece3b935" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.569233 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.627366 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-dns-svc\") pod \"578512a9-ebbb-4869-a3d6-b9a09298eeec\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.627495 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-config\") pod \"578512a9-ebbb-4869-a3d6-b9a09298eeec\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.627548 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8jqt\" (UniqueName: \"kubernetes.io/projected/578512a9-ebbb-4869-a3d6-b9a09298eeec-kube-api-access-j8jqt\") pod \"578512a9-ebbb-4869-a3d6-b9a09298eeec\" (UID: \"578512a9-ebbb-4869-a3d6-b9a09298eeec\") " Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.627957 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "578512a9-ebbb-4869-a3d6-b9a09298eeec" (UID: "578512a9-ebbb-4869-a3d6-b9a09298eeec"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.628464 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.629532 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-config" (OuterVolumeSpecName: "config") pod "578512a9-ebbb-4869-a3d6-b9a09298eeec" (UID: "578512a9-ebbb-4869-a3d6-b9a09298eeec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.638336 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/578512a9-ebbb-4869-a3d6-b9a09298eeec-kube-api-access-j8jqt" (OuterVolumeSpecName: "kube-api-access-j8jqt") pod "578512a9-ebbb-4869-a3d6-b9a09298eeec" (UID: "578512a9-ebbb-4869-a3d6-b9a09298eeec"). InnerVolumeSpecName "kube-api-access-j8jqt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.730696 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/578512a9-ebbb-4869-a3d6-b9a09298eeec-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:15 crc kubenswrapper[4634]: I0929 14:02:15.730741 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8jqt\" (UniqueName: \"kubernetes.io/projected/578512a9-ebbb-4869-a3d6-b9a09298eeec-kube-api-access-j8jqt\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.078519 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-gbn6f"] Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.092737 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" podUID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerName="dnsmasq-dns" containerID="cri-o://69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee" gracePeriod=10 Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.104402 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.158540 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.158964 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.165704 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-twmhj"] Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.221336 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-f8sjj"] Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.228095 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.272440 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-f8sjj"] Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.332468 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.343140 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.343187 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-config\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.343208 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnfw4\" (UniqueName: \"kubernetes.io/projected/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-kube-api-access-mnfw4\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.343337 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-dns-svc\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.343663 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.481246 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.481332 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.481354 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-config\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " 
pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.481371 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnfw4\" (UniqueName: \"kubernetes.io/projected/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-kube-api-access-mnfw4\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.481414 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-dns-svc\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.482864 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-dns-svc\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.483507 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.484067 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.484622 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-config\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.523269 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnfw4\" (UniqueName: \"kubernetes.io/projected/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-kube-api-access-mnfw4\") pod \"dnsmasq-dns-698758b865-f8sjj\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") " pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.579194 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" event={"ID":"4850732f-63f0-44a8-868b-ab1746b63f43","Type":"ContainerStarted","Data":"13c8519e604439870bf67258f791d4e023ab88e7fe0be5a88101e26ed1c4fddf"} Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.579339 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rhvml" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.579721 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.580798 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.692234 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rhvml"] Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.708940 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rhvml"] Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.722230 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 14:02:16 crc kubenswrapper[4634]: I0929 14:02:16.783511 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.217312 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.237264 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.248093 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.248628 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.248863 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-25wmd" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.250238 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.250537 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.305248 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:17 crc kubenswrapper[4634]: W0929 14:02:17.306303 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2a0ccf1_a36f_4573_8b3c_59cf92e096ab.slice/crio-78e5a9659ec0bfe40ab9d183cc8e240372463eaffa8922533b25b1cd735e538c WatchSource:0}: Error finding container 78e5a9659ec0bfe40ab9d183cc8e240372463eaffa8922533b25b1cd735e538c: Status 404 returned error can't find the container with id 78e5a9659ec0bfe40ab9d183cc8e240372463eaffa8922533b25b1cd735e538c Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.309203 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-f8sjj"] Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.417385 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-config\") pod \"477c0d21-a4fb-48d7-abc3-2078580d9c14\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.417979 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-ovsdbserver-nb\") pod \"477c0d21-a4fb-48d7-abc3-2078580d9c14\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.418189 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-dns-svc\") pod \"477c0d21-a4fb-48d7-abc3-2078580d9c14\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.418233 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdm6g\" (UniqueName: \"kubernetes.io/projected/477c0d21-a4fb-48d7-abc3-2078580d9c14-kube-api-access-jdm6g\") pod \"477c0d21-a4fb-48d7-abc3-2078580d9c14\" (UID: \"477c0d21-a4fb-48d7-abc3-2078580d9c14\") " Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.418498 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvfcs\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-kube-api-access-rvfcs\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.418564 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.418592 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/80d6ffb7-ae89-453f-8694-074a86517297-cache\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.425476 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477c0d21-a4fb-48d7-abc3-2078580d9c14-kube-api-access-jdm6g" (OuterVolumeSpecName: 
"kube-api-access-jdm6g") pod "477c0d21-a4fb-48d7-abc3-2078580d9c14" (UID: "477c0d21-a4fb-48d7-abc3-2078580d9c14"). InnerVolumeSpecName "kube-api-access-jdm6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.425545 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.425630 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/80d6ffb7-ae89-453f-8694-074a86517297-lock\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.425874 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdm6g\" (UniqueName: \"kubernetes.io/projected/477c0d21-a4fb-48d7-abc3-2078580d9c14-kube-api-access-jdm6g\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.487408 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-config" (OuterVolumeSpecName: "config") pod "477c0d21-a4fb-48d7-abc3-2078580d9c14" (UID: "477c0d21-a4fb-48d7-abc3-2078580d9c14"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.493640 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "477c0d21-a4fb-48d7-abc3-2078580d9c14" (UID: "477c0d21-a4fb-48d7-abc3-2078580d9c14"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.495863 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "477c0d21-a4fb-48d7-abc3-2078580d9c14" (UID: "477c0d21-a4fb-48d7-abc3-2078580d9c14"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527186 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527229 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/80d6ffb7-ae89-453f-8694-074a86517297-lock\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527268 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvfcs\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-kube-api-access-rvfcs\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527314 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527341 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/80d6ffb7-ae89-453f-8694-074a86517297-cache\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527395 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527406 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527417 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/477c0d21-a4fb-48d7-abc3-2078580d9c14-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.527904 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/80d6ffb7-ae89-453f-8694-074a86517297-cache\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.528250 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.529385 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/80d6ffb7-ae89-453f-8694-074a86517297-lock\") pod \"swift-storage-0\" 
(UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: E0929 14:02:17.529518 4634 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 14:02:17 crc kubenswrapper[4634]: E0929 14:02:17.529534 4634 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 14:02:17 crc kubenswrapper[4634]: E0929 14:02:17.529590 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift podName:80d6ffb7-ae89-453f-8694-074a86517297 nodeName:}" failed. No retries permitted until 2025-09-29 14:02:18.029572768 +0000 UTC m=+1068.598300517 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift") pod "swift-storage-0" (UID: "80d6ffb7-ae89-453f-8694-074a86517297") : configmap "swift-ring-files" not found Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.547689 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvfcs\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-kube-api-access-rvfcs\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.552084 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.595519 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-f8sjj" event={"ID":"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab","Type":"ContainerStarted","Data":"78e5a9659ec0bfe40ab9d183cc8e240372463eaffa8922533b25b1cd735e538c"} Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.598452 4634 generic.go:334] "Generic (PLEG): container finished" podID="4850732f-63f0-44a8-868b-ab1746b63f43" containerID="5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c" exitCode=0 Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.599633 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" event={"ID":"4850732f-63f0-44a8-868b-ab1746b63f43","Type":"ContainerDied","Data":"5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c"} Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.609434 4634 generic.go:334] "Generic (PLEG): container finished" podID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerID="69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee" exitCode=0 Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.609729 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" event={"ID":"477c0d21-a4fb-48d7-abc3-2078580d9c14","Type":"ContainerDied","Data":"69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee"} Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.609810 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" 
event={"ID":"477c0d21-a4fb-48d7-abc3-2078580d9c14","Type":"ContainerDied","Data":"126ad22f5d61e13287ca3e0f9724fb4e17bc9ed9c5e41aabf68365115c205638"} Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.609834 4634 scope.go:117] "RemoveContainer" containerID="69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.610432 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-gbn6f" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.690007 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.737106 4634 scope.go:117] "RemoveContainer" containerID="f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.812193 4634 scope.go:117] "RemoveContainer" containerID="69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee" Sep 29 14:02:17 crc kubenswrapper[4634]: E0929 14:02:17.812673 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee\": container with ID starting with 69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee not found: ID does not exist" containerID="69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.812706 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee"} err="failed to get container status \"69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee\": rpc error: code = NotFound desc = could not find container \"69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee\": container with ID starting with 69c56e01d13838c2cc529249e26a984626ca9b86f949cb48e7a0faa479ee0fee not found: ID does not exist" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.812731 4634 scope.go:117] "RemoveContainer" containerID="f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf" Sep 29 14:02:17 crc kubenswrapper[4634]: E0929 14:02:17.813431 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf\": container with ID starting with f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf not found: ID does not exist" containerID="f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.813499 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf"} err="failed to get container status \"f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf\": rpc error: code = NotFound desc = could not find container \"f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf\": container with ID starting with f685916f66ff78eb4e2453a887440e6b06acfec05615706b0b3ce70d48bfa6cf not found: ID does not exist" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.816644 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-gbn6f"] Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 
14:02:17.835480 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-gbn6f"] Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.971778 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 14:02:17 crc kubenswrapper[4634]: E0929 14:02:17.975417 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerName="dnsmasq-dns" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.975953 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerName="dnsmasq-dns" Sep 29 14:02:17 crc kubenswrapper[4634]: E0929 14:02:17.982962 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerName="init" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.983232 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerName="init" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.983789 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="477c0d21-a4fb-48d7-abc3-2078580d9c14" containerName="dnsmasq-dns" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.985532 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.988652 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.988957 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.989130 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-vfcnw" Sep 29 14:02:17 crc kubenswrapper[4634]: I0929 14:02:17.989316 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:17.999981 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.051540 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:18 crc kubenswrapper[4634]: E0929 14:02:18.051796 4634 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 14:02:18 crc kubenswrapper[4634]: E0929 14:02:18.051949 4634 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 14:02:18 crc kubenswrapper[4634]: E0929 14:02:18.052025 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift podName:80d6ffb7-ae89-453f-8694-074a86517297 nodeName:}" failed. No retries permitted until 2025-09-29 14:02:19.052001206 +0000 UTC m=+1069.620728955 (durationBeforeRetry 1s). 
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.052164 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0288fa06-e56a-4201-a883-d1ece43562ac-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.052264 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0288fa06-e56a-4201-a883-d1ece43562ac-scripts\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.052395 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.052505 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbr22\" (UniqueName: \"kubernetes.io/projected/0288fa06-e56a-4201-a883-d1ece43562ac-kube-api-access-fbr22\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.052663 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.052781 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.052897 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0288fa06-e56a-4201-a883-d1ece43562ac-config\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: E0929 14:02:18.092027 4634 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Sep 29 14:02:18 crc kubenswrapper[4634]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/4850732f-63f0-44a8-868b-ab1746b63f43/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Sep 29 14:02:18 crc kubenswrapper[4634]: > podSandboxID="13c8519e604439870bf67258f791d4e023ab88e7fe0be5a88101e26ed1c4fddf"
Sep 29 14:02:18 crc kubenswrapper[4634]: E0929 14:02:18.092442 4634 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Sep 29 14:02:18 crc kubenswrapper[4634]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n599h5cbh7ch5d4h66fh676hdbh546h95h88h5ffh55ch7fhch57ch687hddhc7h5fdh57dh674h56fh64ch98h9bh557h55dh646h54ch54fh5c4h597q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fgv64,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-86db49b7ff-twmhj_openstack(4850732f-63f0-44a8-868b-ab1746b63f43): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/4850732f-63f0-44a8-868b-ab1746b63f43/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Sep 29 14:02:18 crc kubenswrapper[4634]: > logger="UnhandledError"
Sep 29 14:02:18 crc kubenswrapper[4634]: E0929 14:02:18.093885 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/4850732f-63f0-44a8-868b-ab1746b63f43/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" podUID="4850732f-63f0-44a8-868b-ab1746b63f43"
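The CreateContainerError above is a subPath race: for every VolumeMount with a SubPath (see the dns-svc, ovsdbserver-nb and ovsdbserver-sb mounts in the dumped container spec), the kubelet first prepares a bind source under volume-subpaths/<volume>/<container>/<mount-index> and the runtime then mounts it into the container; if the pod is torn down in between, as this dnsmasq-dns pod was, the prepared path vanishes and the create fails. The shape of the failing mount, rebuilt from the logged spec with client-go types (a readability sketch, not the full manifest):

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	// From the logged spec: the "dns-svc" key of the dns-svc volume is
    	// projected to a single file below /etc/dnsmasq.d/hosts via SubPath.
    	mount := corev1.VolumeMount{
    		Name:      "dns-svc",
    		ReadOnly:  true,
    		MountPath: "/etc/dnsmasq.d/hosts/dns-svc",
    		SubPath:   "dns-svc",
    	}
    	fmt.Printf("%+v\n", mount)
    }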
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.120301 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477c0d21-a4fb-48d7-abc3-2078580d9c14" path="/var/lib/kubelet/pods/477c0d21-a4fb-48d7-abc3-2078580d9c14/volumes"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.120907 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="578512a9-ebbb-4869-a3d6-b9a09298eeec" path="/var/lib/kubelet/pods/578512a9-ebbb-4869-a3d6-b9a09298eeec/volumes"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.154596 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0288fa06-e56a-4201-a883-d1ece43562ac-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.154639 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0288fa06-e56a-4201-a883-d1ece43562ac-scripts\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.154674 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.154704 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbr22\" (UniqueName: \"kubernetes.io/projected/0288fa06-e56a-4201-a883-d1ece43562ac-kube-api-access-fbr22\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.154764 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.154790 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.154816 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0288fa06-e56a-4201-a883-d1ece43562ac-config\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.155210 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0288fa06-e56a-4201-a883-d1ece43562ac-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.155799 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0288fa06-e56a-4201-a883-d1ece43562ac-scripts\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.156906 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0288fa06-e56a-4201-a883-d1ece43562ac-config\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.159513 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.159732 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.159904 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0288fa06-e56a-4201-a883-d1ece43562ac-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.175297 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbr22\" (UniqueName: \"kubernetes.io/projected/0288fa06-e56a-4201-a883-d1ece43562ac-kube-api-access-fbr22\") pod \"ovn-northd-0\" (UID: \"0288fa06-e56a-4201-a883-d1ece43562ac\") " pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.317940 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Sep 29 14:02:18 crc kubenswrapper[4634]: I0929 14:02:18.826587 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Sep 29 14:02:18 crc kubenswrapper[4634]: W0929 14:02:18.844543 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0288fa06_e56a_4201_a883_d1ece43562ac.slice/crio-8d4541b85da9efb89e19a192b155f6bafdbcfc31cc3057376f2a4eedf66f778b WatchSource:0}: Error finding container 8d4541b85da9efb89e19a192b155f6bafdbcfc31cc3057376f2a4eedf66f778b: Status 404 returned error can't find the container with id 8d4541b85da9efb89e19a192b155f6bafdbcfc31cc3057376f2a4eedf66f778b
Sep 29 14:02:19 crc kubenswrapper[4634]: I0929 14:02:19.078577 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0"
Sep 29 14:02:19 crc kubenswrapper[4634]: E0929 14:02:19.078840 4634 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 14:02:19 crc kubenswrapper[4634]: E0929 14:02:19.079139 4634 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 14:02:19 crc kubenswrapper[4634]: E0929 14:02:19.079220 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift podName:80d6ffb7-ae89-453f-8694-074a86517297 nodeName:}" failed. No retries permitted until 2025-09-29 14:02:21.07919826 +0000 UTC m=+1071.647926009 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift") pod "swift-storage-0" (UID: "80d6ffb7-ae89-453f-8694-074a86517297") : configmap "swift-ring-files" not found
Sep 29 14:02:19 crc kubenswrapper[4634]: I0929 14:02:19.630141 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0288fa06-e56a-4201-a883-d1ece43562ac","Type":"ContainerStarted","Data":"8d4541b85da9efb89e19a192b155f6bafdbcfc31cc3057376f2a4eedf66f778b"}
Sep 29 14:02:20 crc kubenswrapper[4634]: I0929 14:02:20.640494 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-f8sjj" event={"ID":"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab","Type":"ContainerStarted","Data":"852983e6cc94c98ca2822eaae81a17239150bce641d539329ef81c5676d73649"}
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.057500 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-4xhpv"]
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.095687 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.098418 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-4xhpv"]
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.106573 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.106860 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.107005 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.128365 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-swiftconf\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.128614 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cd7s\" (UniqueName: \"kubernetes.io/projected/2d10ea94-d237-4954-8588-e5dfea94cae8-kube-api-access-6cd7s\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.128843 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-combined-ca-bundle\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.128960 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-scripts\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.129057 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-dispersionconf\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.129138 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.129266 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-ring-data-devices\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.129302 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d10ea94-d237-4954-8588-e5dfea94cae8-etc-swift\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: E0929 14:02:21.129646 4634 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Sep 29 14:02:21 crc kubenswrapper[4634]: E0929 14:02:21.129662 4634 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Sep 29 14:02:21 crc kubenswrapper[4634]: E0929 14:02:21.129705 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift podName:80d6ffb7-ae89-453f-8694-074a86517297 nodeName:}" failed. No retries permitted until 2025-09-29 14:02:25.129689651 +0000 UTC m=+1075.698417400 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift") pod "swift-storage-0" (UID: "80d6ffb7-ae89-453f-8694-074a86517297") : configmap "swift-ring-files" not found
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.142986 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-4xhpv"]
Sep 29 14:02:21 crc kubenswrapper[4634]: E0929 14:02:21.144810 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-6cd7s ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-4xhpv" podUID="2d10ea94-d237-4954-8588-e5dfea94cae8"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.165750 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-9q4mg"]
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.167266 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9q4mg"
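All of the etc-swift failures above trace back to one thing: etc-swift on swift-storage-0 is a projected volume whose source is the swift-ring-files ConfigMap, and that ConfigMap only appears once a swift-ring-rebalance job has published the rings, so every SetUp attempt until then fails and is re-queued. The volume definition implied by the projected.go errors, written out with client-go types (a sketch inferred from the log, not the actual manifest):

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    )

    func main() {
    	// A projected volume that materializes the swift-ring-files ConfigMap.
    	// SetUp blocks until the ConfigMap exists because Optional is not set.
    	vol := corev1.Volume{
    		Name: "etc-swift",
    		VolumeSource: corev1.VolumeSource{
    			Projected: &corev1.ProjectedVolumeSource{
    				Sources: []corev1.VolumeProjection{{
    					ConfigMap: &corev1.ConfigMapProjection{
    						LocalObjectReference: corev1.LocalObjectReference{Name: "swift-ring-files"},
    					},
    				}},
    			},
    		},
    	}
    	fmt.Printf("%+v\n", vol)
    }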
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.234892 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-scripts\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.235438 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-dispersionconf\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.235562 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-dispersionconf\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.235643 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-combined-ca-bundle\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.235758 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-ring-data-devices\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.235831 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d10ea94-d237-4954-8588-e5dfea94cae8-etc-swift\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.236135 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slj5j\" (UniqueName: \"kubernetes.io/projected/6ad020b7-a243-46de-8a47-2bb8af6042a0-kube-api-access-slj5j\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.236272 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-ring-data-devices\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.236465 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-swiftconf\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.236561 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cd7s\" (UniqueName: \"kubernetes.io/projected/2d10ea94-d237-4954-8588-e5dfea94cae8-kube-api-access-6cd7s\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.236766 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-scripts\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.236864 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-swiftconf\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.237043 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6ad020b7-a243-46de-8a47-2bb8af6042a0-etc-swift\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.237154 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-combined-ca-bundle\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.238876 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-scripts\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.239333 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-9q4mg"]
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.239749 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d10ea94-d237-4954-8588-e5dfea94cae8-etc-swift\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.239858 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-ring-data-devices\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.247766 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-combined-ca-bundle\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.249131 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-dispersionconf\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.260916 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-swiftconf\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.261603 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cd7s\" (UniqueName: \"kubernetes.io/projected/2d10ea94-d237-4954-8588-e5dfea94cae8-kube-api-access-6cd7s\") pod \"swift-ring-rebalance-4xhpv\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") " pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.338725 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slj5j\" (UniqueName: \"kubernetes.io/projected/6ad020b7-a243-46de-8a47-2bb8af6042a0-kube-api-access-slj5j\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.339044 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-ring-data-devices\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.339177 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-scripts\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.339274 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-swiftconf\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.339377 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6ad020b7-a243-46de-8a47-2bb8af6042a0-etc-swift\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.339488 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-dispersionconf\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.339565 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-combined-ca-bundle\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.340570 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6ad020b7-a243-46de-8a47-2bb8af6042a0-etc-swift\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.341458 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-ring-data-devices\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.341729 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-scripts\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.343342 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-combined-ca-bundle\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.344553 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-swiftconf\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.345148 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-dispersionconf\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.368187 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slj5j\" (UniqueName: \"kubernetes.io/projected/6ad020b7-a243-46de-8a47-2bb8af6042a0-kube-api-access-slj5j\") pod \"swift-ring-rebalance-9q4mg\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.500358 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9q4mg"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.654844 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" event={"ID":"4850732f-63f0-44a8-868b-ab1746b63f43","Type":"ContainerStarted","Data":"764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31"}
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.655909 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.659273 4634 generic.go:334] "Generic (PLEG): container finished" podID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerID="852983e6cc94c98ca2822eaae81a17239150bce641d539329ef81c5676d73649" exitCode=0
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.659347 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-f8sjj" event={"ID":"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab","Type":"ContainerDied","Data":"852983e6cc94c98ca2822eaae81a17239150bce641d539329ef81c5676d73649"}
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.659460 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.671041 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.714674 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" podStartSLOduration=6.714642797 podStartE2EDuration="6.714642797s" podCreationTimestamp="2025-09-29 14:02:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:02:21.686746955 +0000 UTC m=+1072.255474704" watchObservedRunningTime="2025-09-29 14:02:21.714642797 +0000 UTC m=+1072.283370556"
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.752668 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-swiftconf\") pod \"2d10ea94-d237-4954-8588-e5dfea94cae8\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") "
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.752715 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-ring-data-devices\") pod \"2d10ea94-d237-4954-8588-e5dfea94cae8\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") "
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.752798 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d10ea94-d237-4954-8588-e5dfea94cae8-etc-swift\") pod \"2d10ea94-d237-4954-8588-e5dfea94cae8\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") "
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.752825 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cd7s\" (UniqueName: \"kubernetes.io/projected/2d10ea94-d237-4954-8588-e5dfea94cae8-kube-api-access-6cd7s\") pod \"2d10ea94-d237-4954-8588-e5dfea94cae8\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") "
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.752850 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-dispersionconf\") pod \"2d10ea94-d237-4954-8588-e5dfea94cae8\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") "
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.752917 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-combined-ca-bundle\") pod \"2d10ea94-d237-4954-8588-e5dfea94cae8\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") "
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.752968 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-scripts\") pod \"2d10ea94-d237-4954-8588-e5dfea94cae8\" (UID: \"2d10ea94-d237-4954-8588-e5dfea94cae8\") "
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.756288 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-scripts" (OuterVolumeSpecName: "scripts") pod "2d10ea94-d237-4954-8588-e5dfea94cae8" (UID: "2d10ea94-d237-4954-8588-e5dfea94cae8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.758198 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.761827 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d10ea94-d237-4954-8588-e5dfea94cae8-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2d10ea94-d237-4954-8588-e5dfea94cae8" (UID: "2d10ea94-d237-4954-8588-e5dfea94cae8"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.765036 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d10ea94-d237-4954-8588-e5dfea94cae8-kube-api-access-6cd7s" (OuterVolumeSpecName: "kube-api-access-6cd7s") pod "2d10ea94-d237-4954-8588-e5dfea94cae8" (UID: "2d10ea94-d237-4954-8588-e5dfea94cae8"). InnerVolumeSpecName "kube-api-access-6cd7s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.767467 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d10ea94-d237-4954-8588-e5dfea94cae8" (UID: "2d10ea94-d237-4954-8588-e5dfea94cae8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.768323 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2d10ea94-d237-4954-8588-e5dfea94cae8" (UID: "2d10ea94-d237-4954-8588-e5dfea94cae8"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.769375 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2d10ea94-d237-4954-8588-e5dfea94cae8" (UID: "2d10ea94-d237-4954-8588-e5dfea94cae8"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.771845 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2d10ea94-d237-4954-8588-e5dfea94cae8" (UID: "2d10ea94-d237-4954-8588-e5dfea94cae8"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.859625 4634 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-swiftconf\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.859660 4634 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2d10ea94-d237-4954-8588-e5dfea94cae8-ring-data-devices\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.859670 4634 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2d10ea94-d237-4954-8588-e5dfea94cae8-etc-swift\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.859681 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cd7s\" (UniqueName: \"kubernetes.io/projected/2d10ea94-d237-4954-8588-e5dfea94cae8-kube-api-access-6cd7s\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.859691 4634 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-dispersionconf\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:21 crc kubenswrapper[4634]: I0929 14:02:21.859701 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d10ea94-d237-4954-8588-e5dfea94cae8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.230052 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-9q4mg"]
Sep 29 14:02:22 crc kubenswrapper[4634]: W0929 14:02:22.232006 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ad020b7_a243_46de_8a47_2bb8af6042a0.slice/crio-09f6f092204e299d4ac1f9bc1db8b61d599a20924e144f2158084d134abeb5d1 WatchSource:0}: Error finding container 09f6f092204e299d4ac1f9bc1db8b61d599a20924e144f2158084d134abeb5d1: Status 404 returned error can't find the container with id 09f6f092204e299d4ac1f9bc1db8b61d599a20924e144f2158084d134abeb5d1
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.377684 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.378289 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.670614 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9q4mg" event={"ID":"6ad020b7-a243-46de-8a47-2bb8af6042a0","Type":"ContainerStarted","Data":"09f6f092204e299d4ac1f9bc1db8b61d599a20924e144f2158084d134abeb5d1"}
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.673943 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0288fa06-e56a-4201-a883-d1ece43562ac","Type":"ContainerStarted","Data":"6900e1ec5639f2c83d30721a5814cb95cab3bf7a8cc0c9cbf9928b4ac66a204d"}
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.674128 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0288fa06-e56a-4201-a883-d1ece43562ac","Type":"ContainerStarted","Data":"346de3d0ce99e51f0fb1d418ff4c47caf328b0ef3d49dc11ba98e094fa0b8e85"}
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.674391 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.676680 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-f8sjj" event={"ID":"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab","Type":"ContainerStarted","Data":"912fdde47aa8722999cd978b7ed27d2c40e10e85d16345668086c4ed9e3d7915"}
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.676765 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-4xhpv"
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.708624 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.820116455 podStartE2EDuration="5.708600903s" podCreationTimestamp="2025-09-29 14:02:17 +0000 UTC" firstStartedPulling="2025-09-29 14:02:18.847976695 +0000 UTC m=+1069.416704444" lastFinishedPulling="2025-09-29 14:02:21.736461143 +0000 UTC m=+1072.305188892" observedRunningTime="2025-09-29 14:02:22.70334847 +0000 UTC m=+1073.272076279" watchObservedRunningTime="2025-09-29 14:02:22.708600903 +0000 UTC m=+1073.277328652"
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.740184 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-f8sjj" podStartSLOduration=6.740151345 podStartE2EDuration="6.740151345s" podCreationTimestamp="2025-09-29 14:02:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:02:22.731800326 +0000 UTC m=+1073.300528085" watchObservedRunningTime="2025-09-29 14:02:22.740151345 +0000 UTC m=+1073.308879094"
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.790406 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-4xhpv"]
Sep 29 14:02:22 crc kubenswrapper[4634]: I0929 14:02:22.798978 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-4xhpv"]
Sep 29 14:02:23 crc kubenswrapper[4634]: I0929 14:02:23.685379 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-f8sjj"
Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.035256 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.036028 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.093902 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.127664 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d10ea94-d237-4954-8588-e5dfea94cae8" path="/var/lib/kubelet/pods/2d10ea94-d237-4954-8588-e5dfea94cae8/volumes" Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.451344 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.528141 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.697013 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"02c51435-cae1-4758-a27a-6e461be7161b","Type":"ContainerStarted","Data":"f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03"} Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.698231 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.725297 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.717560628 podStartE2EDuration="49.725270049s" podCreationTimestamp="2025-09-29 14:01:35 +0000 UTC" firstStartedPulling="2025-09-29 14:01:36.503361808 +0000 UTC m=+1027.072089557" lastFinishedPulling="2025-09-29 14:02:23.511071229 +0000 UTC m=+1074.079798978" observedRunningTime="2025-09-29 14:02:24.718036973 +0000 UTC m=+1075.286764722" watchObservedRunningTime="2025-09-29 14:02:24.725270049 +0000 UTC m=+1075.293997788" Sep 29 14:02:24 crc kubenswrapper[4634]: I0929 14:02:24.766751 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 14:02:25 crc kubenswrapper[4634]: I0929 14:02:25.135289 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:25 crc kubenswrapper[4634]: E0929 14:02:25.135616 4634 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 14:02:25 crc kubenswrapper[4634]: E0929 14:02:25.135652 4634 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 14:02:25 crc kubenswrapper[4634]: E0929 14:02:25.135771 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift podName:80d6ffb7-ae89-453f-8694-074a86517297 nodeName:}" failed. No retries permitted until 2025-09-29 14:02:33.13572881 +0000 UTC m=+1083.704456559 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift") pod "swift-storage-0" (UID: "80d6ffb7-ae89-453f-8694-074a86517297") : configmap "swift-ring-files" not found
Sep 29 14:02:25 crc kubenswrapper[4634]: I0929 14:02:25.474172 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj"
Sep 29 14:02:26 crc kubenswrapper[4634]: I0929 14:02:26.581283 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-f8sjj"
Sep 29 14:02:26 crc kubenswrapper[4634]: I0929 14:02:26.636195 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-twmhj"]
Sep 29 14:02:26 crc kubenswrapper[4634]: I0929 14:02:26.636734 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" podUID="4850732f-63f0-44a8-868b-ab1746b63f43" containerName="dnsmasq-dns" containerID="cri-o://764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31" gracePeriod=10
Sep 29 14:02:26 crc kubenswrapper[4634]: I0929 14:02:26.722837 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9q4mg" event={"ID":"6ad020b7-a243-46de-8a47-2bb8af6042a0","Type":"ContainerStarted","Data":"f358de159bba2717ce94b39c2e0be1c3e4822f6ab9bda97b8fe1f2c4fe3d7c89"}
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.093159 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj"
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.115836 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-9q4mg" podStartSLOduration=2.002992023 podStartE2EDuration="6.115812888s" podCreationTimestamp="2025-09-29 14:02:21 +0000 UTC" firstStartedPulling="2025-09-29 14:02:22.234979328 +0000 UTC m=+1072.803707077" lastFinishedPulling="2025-09-29 14:02:26.347800153 +0000 UTC m=+1076.916527942" observedRunningTime="2025-09-29 14:02:26.74863148 +0000 UTC m=+1077.317359229" watchObservedRunningTime="2025-09-29 14:02:27.115812888 +0000 UTC m=+1077.684540637"
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.187838 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-sb\") pod \"4850732f-63f0-44a8-868b-ab1746b63f43\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") "
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.187935 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgv64\" (UniqueName: \"kubernetes.io/projected/4850732f-63f0-44a8-868b-ab1746b63f43-kube-api-access-fgv64\") pod \"4850732f-63f0-44a8-868b-ab1746b63f43\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") "
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.188005 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-config\") pod \"4850732f-63f0-44a8-868b-ab1746b63f43\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") "
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.188112 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-dns-svc\") pod \"4850732f-63f0-44a8-868b-ab1746b63f43\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") "
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.188133 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-nb\") pod \"4850732f-63f0-44a8-868b-ab1746b63f43\" (UID: \"4850732f-63f0-44a8-868b-ab1746b63f43\") "
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.193071 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4850732f-63f0-44a8-868b-ab1746b63f43-kube-api-access-fgv64" (OuterVolumeSpecName: "kube-api-access-fgv64") pod "4850732f-63f0-44a8-868b-ab1746b63f43" (UID: "4850732f-63f0-44a8-868b-ab1746b63f43"). InnerVolumeSpecName "kube-api-access-fgv64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.226490 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4850732f-63f0-44a8-868b-ab1746b63f43" (UID: "4850732f-63f0-44a8-868b-ab1746b63f43"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.228118 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-config" (OuterVolumeSpecName: "config") pod "4850732f-63f0-44a8-868b-ab1746b63f43" (UID: "4850732f-63f0-44a8-868b-ab1746b63f43"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.235465 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4850732f-63f0-44a8-868b-ab1746b63f43" (UID: "4850732f-63f0-44a8-868b-ab1746b63f43"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.241731 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4850732f-63f0-44a8-868b-ab1746b63f43" (UID: "4850732f-63f0-44a8-868b-ab1746b63f43"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.290653 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgv64\" (UniqueName: \"kubernetes.io/projected/4850732f-63f0-44a8-868b-ab1746b63f43-kube-api-access-fgv64\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.290714 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-config\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.290727 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.290740 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.290753 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4850732f-63f0-44a8-868b-ab1746b63f43-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.733730 4634 generic.go:334] "Generic (PLEG): container finished" podID="4850732f-63f0-44a8-868b-ab1746b63f43" containerID="764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31" exitCode=0
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.733833 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" event={"ID":"4850732f-63f0-44a8-868b-ab1746b63f43","Type":"ContainerDied","Data":"764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31"}
Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.733889 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj"
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.733903 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-twmhj" event={"ID":"4850732f-63f0-44a8-868b-ab1746b63f43","Type":"ContainerDied","Data":"13c8519e604439870bf67258f791d4e023ab88e7fe0be5a88101e26ed1c4fddf"} Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.733932 4634 scope.go:117] "RemoveContainer" containerID="764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31" Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.765652 4634 scope.go:117] "RemoveContainer" containerID="5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c" Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.766263 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-twmhj"] Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.775491 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-twmhj"] Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.801466 4634 scope.go:117] "RemoveContainer" containerID="764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31" Sep 29 14:02:27 crc kubenswrapper[4634]: E0929 14:02:27.802239 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31\": container with ID starting with 764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31 not found: ID does not exist" containerID="764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31" Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.802299 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31"} err="failed to get container status \"764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31\": rpc error: code = NotFound desc = could not find container \"764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31\": container with ID starting with 764ad918131b79df757ee972add313e8a95a2aa3f28ff84e5748287c80a7bd31 not found: ID does not exist" Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.802338 4634 scope.go:117] "RemoveContainer" containerID="5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c" Sep 29 14:02:27 crc kubenswrapper[4634]: E0929 14:02:27.802861 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c\": container with ID starting with 5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c not found: ID does not exist" containerID="5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c" Sep 29 14:02:27 crc kubenswrapper[4634]: I0929 14:02:27.802927 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c"} err="failed to get container status \"5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c\": rpc error: code = NotFound desc = could not find container \"5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c\": container with ID starting with 5fe08e9e9c274c157cb1c6718b872af92f3534dedd7bbb226dcd95ec50b1384c not found: ID does not exist" Sep 29 
14:02:28 crc kubenswrapper[4634]: I0929 14:02:28.121354 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4850732f-63f0-44a8-868b-ab1746b63f43" path="/var/lib/kubelet/pods/4850732f-63f0-44a8-868b-ab1746b63f43/volumes" Sep 29 14:02:33 crc kubenswrapper[4634]: E0929 14:02:33.226145 4634 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 14:02:33 crc kubenswrapper[4634]: E0929 14:02:33.226832 4634 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 14:02:33 crc kubenswrapper[4634]: E0929 14:02:33.226916 4634 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift podName:80d6ffb7-ae89-453f-8694-074a86517297 nodeName:}" failed. No retries permitted until 2025-09-29 14:02:49.226890288 +0000 UTC m=+1099.795618077 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift") pod "swift-storage-0" (UID: "80d6ffb7-ae89-453f-8694-074a86517297") : configmap "swift-ring-files" not found Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.225888 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.393662 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.733624 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-vz7j6"] Sep 29 14:02:33 crc kubenswrapper[4634]: E0929 14:02:33.734319 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4850732f-63f0-44a8-868b-ab1746b63f43" containerName="dnsmasq-dns" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.734345 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4850732f-63f0-44a8-868b-ab1746b63f43" containerName="dnsmasq-dns" Sep 29 14:02:33 crc kubenswrapper[4634]: E0929 14:02:33.734377 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4850732f-63f0-44a8-868b-ab1746b63f43" containerName="init" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.734387 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4850732f-63f0-44a8-868b-ab1746b63f43" containerName="init" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.734600 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4850732f-63f0-44a8-868b-ab1746b63f43" containerName="dnsmasq-dns" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.735393 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-vz7j6" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.747750 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vz7j6"] Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.814850 4634 generic.go:334] "Generic (PLEG): container finished" podID="6ad020b7-a243-46de-8a47-2bb8af6042a0" containerID="f358de159bba2717ce94b39c2e0be1c3e4822f6ab9bda97b8fe1f2c4fe3d7c89" exitCode=0 Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.814908 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9q4mg" event={"ID":"6ad020b7-a243-46de-8a47-2bb8af6042a0","Type":"ContainerDied","Data":"f358de159bba2717ce94b39c2e0be1c3e4822f6ab9bda97b8fe1f2c4fe3d7c89"} Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.843110 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xndwk\" (UniqueName: \"kubernetes.io/projected/f77b2366-66fb-44ca-8b0d-6dabfbe40e25-kube-api-access-xndwk\") pod \"keystone-db-create-vz7j6\" (UID: \"f77b2366-66fb-44ca-8b0d-6dabfbe40e25\") " pod="openstack/keystone-db-create-vz7j6" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.945545 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xndwk\" (UniqueName: \"kubernetes.io/projected/f77b2366-66fb-44ca-8b0d-6dabfbe40e25-kube-api-access-xndwk\") pod \"keystone-db-create-vz7j6\" (UID: \"f77b2366-66fb-44ca-8b0d-6dabfbe40e25\") " pod="openstack/keystone-db-create-vz7j6" Sep 29 14:02:33 crc kubenswrapper[4634]: I0929 14:02:33.967538 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xndwk\" (UniqueName: \"kubernetes.io/projected/f77b2366-66fb-44ca-8b0d-6dabfbe40e25-kube-api-access-xndwk\") pod \"keystone-db-create-vz7j6\" (UID: \"f77b2366-66fb-44ca-8b0d-6dabfbe40e25\") " pod="openstack/keystone-db-create-vz7j6" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.110327 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vz7j6" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.268257 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-992nz"] Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.269588 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-992nz" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.288211 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-992nz"] Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.360526 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcv8x\" (UniqueName: \"kubernetes.io/projected/bd2d3c84-abb0-49b0-9d73-19c907c704e7-kube-api-access-hcv8x\") pod \"placement-db-create-992nz\" (UID: \"bd2d3c84-abb0-49b0-9d73-19c907c704e7\") " pod="openstack/placement-db-create-992nz" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.370302 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-6j4jw"] Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.371959 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-6j4jw" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.379563 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6j4jw"] Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.463001 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcv8x\" (UniqueName: \"kubernetes.io/projected/bd2d3c84-abb0-49b0-9d73-19c907c704e7-kube-api-access-hcv8x\") pod \"placement-db-create-992nz\" (UID: \"bd2d3c84-abb0-49b0-9d73-19c907c704e7\") " pod="openstack/placement-db-create-992nz" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.463106 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxfqt\" (UniqueName: \"kubernetes.io/projected/49f0c54b-0171-4bea-954f-83babe332811-kube-api-access-qxfqt\") pod \"glance-db-create-6j4jw\" (UID: \"49f0c54b-0171-4bea-954f-83babe332811\") " pod="openstack/glance-db-create-6j4jw" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.484865 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcv8x\" (UniqueName: \"kubernetes.io/projected/bd2d3c84-abb0-49b0-9d73-19c907c704e7-kube-api-access-hcv8x\") pod \"placement-db-create-992nz\" (UID: \"bd2d3c84-abb0-49b0-9d73-19c907c704e7\") " pod="openstack/placement-db-create-992nz" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.565300 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxfqt\" (UniqueName: \"kubernetes.io/projected/49f0c54b-0171-4bea-954f-83babe332811-kube-api-access-qxfqt\") pod \"glance-db-create-6j4jw\" (UID: \"49f0c54b-0171-4bea-954f-83babe332811\") " pod="openstack/glance-db-create-6j4jw" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.584497 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxfqt\" (UniqueName: \"kubernetes.io/projected/49f0c54b-0171-4bea-954f-83babe332811-kube-api-access-qxfqt\") pod \"glance-db-create-6j4jw\" (UID: \"49f0c54b-0171-4bea-954f-83babe332811\") " pod="openstack/glance-db-create-6j4jw" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.593663 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-992nz" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.620345 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-vz7j6"] Sep 29 14:02:34 crc kubenswrapper[4634]: W0929 14:02:34.635332 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf77b2366_66fb_44ca_8b0d_6dabfbe40e25.slice/crio-a710a1ce5f5565ce2670683c7938d62e7c905838b2c42a9ca54c293a28fac4f3 WatchSource:0}: Error finding container a710a1ce5f5565ce2670683c7938d62e7c905838b2c42a9ca54c293a28fac4f3: Status 404 returned error can't find the container with id a710a1ce5f5565ce2670683c7938d62e7c905838b2c42a9ca54c293a28fac4f3 Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.696201 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-6j4jw" Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.832810 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vz7j6" event={"ID":"f77b2366-66fb-44ca-8b0d-6dabfbe40e25","Type":"ContainerStarted","Data":"2b4ebe8a759619ad39585392d198aa2465b2c57af848ea24f0824946813034b2"} Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.834532 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vz7j6" event={"ID":"f77b2366-66fb-44ca-8b0d-6dabfbe40e25","Type":"ContainerStarted","Data":"a710a1ce5f5565ce2670683c7938d62e7c905838b2c42a9ca54c293a28fac4f3"} Sep 29 14:02:34 crc kubenswrapper[4634]: I0929 14:02:34.856127 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-vz7j6" podStartSLOduration=1.856109094 podStartE2EDuration="1.856109094s" podCreationTimestamp="2025-09-29 14:02:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:02:34.854538491 +0000 UTC m=+1085.423266240" watchObservedRunningTime="2025-09-29 14:02:34.856109094 +0000 UTC m=+1085.424836843" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.048034 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-992nz"] Sep 29 14:02:35 crc kubenswrapper[4634]: W0929 14:02:35.087444 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd2d3c84_abb0_49b0_9d73_19c907c704e7.slice/crio-e0a9d9949afe9adc7f24d2abf21cdc5564ec26ba11f970122edd6f725c586a55 WatchSource:0}: Error finding container e0a9d9949afe9adc7f24d2abf21cdc5564ec26ba11f970122edd6f725c586a55: Status 404 returned error can't find the container with id e0a9d9949afe9adc7f24d2abf21cdc5564ec26ba11f970122edd6f725c586a55 Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.227130 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6j4jw"] Sep 29 14:02:35 crc kubenswrapper[4634]: W0929 14:02:35.237405 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49f0c54b_0171_4bea_954f_83babe332811.slice/crio-505893f4cbda363bac4c7729462bdfa829dea5e64e26f72b5a1d598a0c0fc4c6 WatchSource:0}: Error finding container 505893f4cbda363bac4c7729462bdfa829dea5e64e26f72b5a1d598a0c0fc4c6: Status 404 returned error can't find the container with id 505893f4cbda363bac4c7729462bdfa829dea5e64e26f72b5a1d598a0c0fc4c6 Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.370217 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-9q4mg" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.394129 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-scripts\") pod \"6ad020b7-a243-46de-8a47-2bb8af6042a0\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.394250 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slj5j\" (UniqueName: \"kubernetes.io/projected/6ad020b7-a243-46de-8a47-2bb8af6042a0-kube-api-access-slj5j\") pod \"6ad020b7-a243-46de-8a47-2bb8af6042a0\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.394347 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-swiftconf\") pod \"6ad020b7-a243-46de-8a47-2bb8af6042a0\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.394399 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-dispersionconf\") pod \"6ad020b7-a243-46de-8a47-2bb8af6042a0\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.394439 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-combined-ca-bundle\") pod \"6ad020b7-a243-46de-8a47-2bb8af6042a0\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.394478 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-ring-data-devices\") pod \"6ad020b7-a243-46de-8a47-2bb8af6042a0\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.394571 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6ad020b7-a243-46de-8a47-2bb8af6042a0-etc-swift\") pod \"6ad020b7-a243-46de-8a47-2bb8af6042a0\" (UID: \"6ad020b7-a243-46de-8a47-2bb8af6042a0\") " Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.398147 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "6ad020b7-a243-46de-8a47-2bb8af6042a0" (UID: "6ad020b7-a243-46de-8a47-2bb8af6042a0"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.400157 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ad020b7-a243-46de-8a47-2bb8af6042a0-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6ad020b7-a243-46de-8a47-2bb8af6042a0" (UID: "6ad020b7-a243-46de-8a47-2bb8af6042a0"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.406926 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ad020b7-a243-46de-8a47-2bb8af6042a0-kube-api-access-slj5j" (OuterVolumeSpecName: "kube-api-access-slj5j") pod "6ad020b7-a243-46de-8a47-2bb8af6042a0" (UID: "6ad020b7-a243-46de-8a47-2bb8af6042a0"). InnerVolumeSpecName "kube-api-access-slj5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.420584 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "6ad020b7-a243-46de-8a47-2bb8af6042a0" (UID: "6ad020b7-a243-46de-8a47-2bb8af6042a0"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.453617 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-scripts" (OuterVolumeSpecName: "scripts") pod "6ad020b7-a243-46de-8a47-2bb8af6042a0" (UID: "6ad020b7-a243-46de-8a47-2bb8af6042a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.471094 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ad020b7-a243-46de-8a47-2bb8af6042a0" (UID: "6ad020b7-a243-46de-8a47-2bb8af6042a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.476172 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "6ad020b7-a243-46de-8a47-2bb8af6042a0" (UID: "6ad020b7-a243-46de-8a47-2bb8af6042a0"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.496067 4634 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.496135 4634 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.496152 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ad020b7-a243-46de-8a47-2bb8af6042a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.496191 4634 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.496204 4634 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6ad020b7-a243-46de-8a47-2bb8af6042a0-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.496215 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ad020b7-a243-46de-8a47-2bb8af6042a0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.496229 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slj5j\" (UniqueName: \"kubernetes.io/projected/6ad020b7-a243-46de-8a47-2bb8af6042a0-kube-api-access-slj5j\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.846587 4634 generic.go:334] "Generic (PLEG): container finished" podID="bd2d3c84-abb0-49b0-9d73-19c907c704e7" containerID="66251841b61043a3040ed6c7ffd32fbea9a5edcde9ab823412f6787a37c506b5" exitCode=0 Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.847009 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-992nz" event={"ID":"bd2d3c84-abb0-49b0-9d73-19c907c704e7","Type":"ContainerDied","Data":"66251841b61043a3040ed6c7ffd32fbea9a5edcde9ab823412f6787a37c506b5"} Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.847143 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-992nz" event={"ID":"bd2d3c84-abb0-49b0-9d73-19c907c704e7","Type":"ContainerStarted","Data":"e0a9d9949afe9adc7f24d2abf21cdc5564ec26ba11f970122edd6f725c586a55"} Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.855287 4634 generic.go:334] "Generic (PLEG): container finished" podID="49f0c54b-0171-4bea-954f-83babe332811" containerID="d1fec606ae3f0fa6fb22c3e28a28c665404818614f5e8e36917d5c817fa61ead" exitCode=0 Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.855755 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6j4jw" event={"ID":"49f0c54b-0171-4bea-954f-83babe332811","Type":"ContainerDied","Data":"d1fec606ae3f0fa6fb22c3e28a28c665404818614f5e8e36917d5c817fa61ead"} Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.855806 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6j4jw" 
event={"ID":"49f0c54b-0171-4bea-954f-83babe332811","Type":"ContainerStarted","Data":"505893f4cbda363bac4c7729462bdfa829dea5e64e26f72b5a1d598a0c0fc4c6"} Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.874393 4634 generic.go:334] "Generic (PLEG): container finished" podID="f77b2366-66fb-44ca-8b0d-6dabfbe40e25" containerID="2b4ebe8a759619ad39585392d198aa2465b2c57af848ea24f0824946813034b2" exitCode=0 Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.874445 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vz7j6" event={"ID":"f77b2366-66fb-44ca-8b0d-6dabfbe40e25","Type":"ContainerDied","Data":"2b4ebe8a759619ad39585392d198aa2465b2c57af848ea24f0824946813034b2"} Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.883767 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-9q4mg" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.904697 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-9q4mg" event={"ID":"6ad020b7-a243-46de-8a47-2bb8af6042a0","Type":"ContainerDied","Data":"09f6f092204e299d4ac1f9bc1db8b61d599a20924e144f2158084d134abeb5d1"} Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.905264 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09f6f092204e299d4ac1f9bc1db8b61d599a20924e144f2158084d134abeb5d1" Sep 29 14:02:35 crc kubenswrapper[4634]: I0929 14:02:35.905368 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 14:02:36 crc kubenswrapper[4634]: E0929 14:02:36.115474 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ad020b7_a243_46de_8a47_2bb8af6042a0.slice\": RecentStats: unable to find data in memory cache]" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.289503 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6j4jw" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.397477 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-992nz" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.402702 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vz7j6" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.451708 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxfqt\" (UniqueName: \"kubernetes.io/projected/49f0c54b-0171-4bea-954f-83babe332811-kube-api-access-qxfqt\") pod \"49f0c54b-0171-4bea-954f-83babe332811\" (UID: \"49f0c54b-0171-4bea-954f-83babe332811\") " Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.475010 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49f0c54b-0171-4bea-954f-83babe332811-kube-api-access-qxfqt" (OuterVolumeSpecName: "kube-api-access-qxfqt") pod "49f0c54b-0171-4bea-954f-83babe332811" (UID: "49f0c54b-0171-4bea-954f-83babe332811"). InnerVolumeSpecName "kube-api-access-qxfqt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.554141 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xndwk\" (UniqueName: \"kubernetes.io/projected/f77b2366-66fb-44ca-8b0d-6dabfbe40e25-kube-api-access-xndwk\") pod \"f77b2366-66fb-44ca-8b0d-6dabfbe40e25\" (UID: \"f77b2366-66fb-44ca-8b0d-6dabfbe40e25\") " Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.554383 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcv8x\" (UniqueName: \"kubernetes.io/projected/bd2d3c84-abb0-49b0-9d73-19c907c704e7-kube-api-access-hcv8x\") pod \"bd2d3c84-abb0-49b0-9d73-19c907c704e7\" (UID: \"bd2d3c84-abb0-49b0-9d73-19c907c704e7\") " Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.555230 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxfqt\" (UniqueName: \"kubernetes.io/projected/49f0c54b-0171-4bea-954f-83babe332811-kube-api-access-qxfqt\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.559773 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f77b2366-66fb-44ca-8b0d-6dabfbe40e25-kube-api-access-xndwk" (OuterVolumeSpecName: "kube-api-access-xndwk") pod "f77b2366-66fb-44ca-8b0d-6dabfbe40e25" (UID: "f77b2366-66fb-44ca-8b0d-6dabfbe40e25"). InnerVolumeSpecName "kube-api-access-xndwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.559835 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd2d3c84-abb0-49b0-9d73-19c907c704e7-kube-api-access-hcv8x" (OuterVolumeSpecName: "kube-api-access-hcv8x") pod "bd2d3c84-abb0-49b0-9d73-19c907c704e7" (UID: "bd2d3c84-abb0-49b0-9d73-19c907c704e7"). InnerVolumeSpecName "kube-api-access-hcv8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.658239 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcv8x\" (UniqueName: \"kubernetes.io/projected/bd2d3c84-abb0-49b0-9d73-19c907c704e7-kube-api-access-hcv8x\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.659450 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xndwk\" (UniqueName: \"kubernetes.io/projected/f77b2366-66fb-44ca-8b0d-6dabfbe40e25-kube-api-access-xndwk\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.906459 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6j4jw" event={"ID":"49f0c54b-0171-4bea-954f-83babe332811","Type":"ContainerDied","Data":"505893f4cbda363bac4c7729462bdfa829dea5e64e26f72b5a1d598a0c0fc4c6"} Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.906702 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-6j4jw" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.906720 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="505893f4cbda363bac4c7729462bdfa829dea5e64e26f72b5a1d598a0c0fc4c6" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.908913 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-vz7j6" event={"ID":"f77b2366-66fb-44ca-8b0d-6dabfbe40e25","Type":"ContainerDied","Data":"a710a1ce5f5565ce2670683c7938d62e7c905838b2c42a9ca54c293a28fac4f3"} Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.908937 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a710a1ce5f5565ce2670683c7938d62e7c905838b2c42a9ca54c293a28fac4f3" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.908999 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-vz7j6" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.914807 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-992nz" event={"ID":"bd2d3c84-abb0-49b0-9d73-19c907c704e7","Type":"ContainerDied","Data":"e0a9d9949afe9adc7f24d2abf21cdc5564ec26ba11f970122edd6f725c586a55"} Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.915040 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0a9d9949afe9adc7f24d2abf21cdc5564ec26ba11f970122edd6f725c586a55" Sep 29 14:02:37 crc kubenswrapper[4634]: I0929 14:02:37.914909 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-992nz" Sep 29 14:02:40 crc kubenswrapper[4634]: I0929 14:02:40.050845 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-lfvq4" podUID="07a47ca0-1cd2-4e8d-92ce-37083cde3744" containerName="ovn-controller" probeResult="failure" output=< Sep 29 14:02:40 crc kubenswrapper[4634]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 14:02:40 crc kubenswrapper[4634]: > Sep 29 14:02:40 crc kubenswrapper[4634]: I0929 14:02:40.951790 4634 generic.go:334] "Generic (PLEG): container finished" podID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerID="a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6" exitCode=0 Sep 29 14:02:40 crc kubenswrapper[4634]: I0929 14:02:40.951861 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd3a9c91-300c-4510-b7a4-03cf8cbbe729","Type":"ContainerDied","Data":"a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6"} Sep 29 14:02:41 crc kubenswrapper[4634]: I0929 14:02:41.962329 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd3a9c91-300c-4510-b7a4-03cf8cbbe729","Type":"ContainerStarted","Data":"6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0"} Sep 29 14:02:41 crc kubenswrapper[4634]: I0929 14:02:41.963268 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 14:02:42 crc kubenswrapper[4634]: I0929 14:02:42.003148 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=35.434698088 podStartE2EDuration="1m13.003113005s" podCreationTimestamp="2025-09-29 14:01:29 +0000 UTC" firstStartedPulling="2025-09-29 14:01:31.963692293 +0000 UTC m=+1022.532420042" 
lastFinishedPulling="2025-09-29 14:02:09.53210721 +0000 UTC m=+1060.100834959" observedRunningTime="2025-09-29 14:02:41.996407863 +0000 UTC m=+1092.565135652" watchObservedRunningTime="2025-09-29 14:02:42.003113005 +0000 UTC m=+1092.571840794" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.945875 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-950d-account-create-n7qrh"] Sep 29 14:02:43 crc kubenswrapper[4634]: E0929 14:02:43.946243 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd2d3c84-abb0-49b0-9d73-19c907c704e7" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946256 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd2d3c84-abb0-49b0-9d73-19c907c704e7" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: E0929 14:02:43.946283 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f0c54b-0171-4bea-954f-83babe332811" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946289 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f0c54b-0171-4bea-954f-83babe332811" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: E0929 14:02:43.946302 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ad020b7-a243-46de-8a47-2bb8af6042a0" containerName="swift-ring-rebalance" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946309 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ad020b7-a243-46de-8a47-2bb8af6042a0" containerName="swift-ring-rebalance" Sep 29 14:02:43 crc kubenswrapper[4634]: E0929 14:02:43.946319 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f77b2366-66fb-44ca-8b0d-6dabfbe40e25" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946326 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="f77b2366-66fb-44ca-8b0d-6dabfbe40e25" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946492 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="f77b2366-66fb-44ca-8b0d-6dabfbe40e25" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946508 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f0c54b-0171-4bea-954f-83babe332811" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946515 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd2d3c84-abb0-49b0-9d73-19c907c704e7" containerName="mariadb-database-create" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.946530 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ad020b7-a243-46de-8a47-2bb8af6042a0" containerName="swift-ring-rebalance" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.947141 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-950d-account-create-n7qrh" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.949957 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 14:02:43 crc kubenswrapper[4634]: I0929 14:02:43.972799 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-950d-account-create-n7qrh"] Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.093325 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7hmn\" (UniqueName: \"kubernetes.io/projected/0ab322f8-39ae-4378-8849-529322d8581b-kube-api-access-n7hmn\") pod \"keystone-950d-account-create-n7qrh\" (UID: \"0ab322f8-39ae-4378-8849-529322d8581b\") " pod="openstack/keystone-950d-account-create-n7qrh" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.195727 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7hmn\" (UniqueName: \"kubernetes.io/projected/0ab322f8-39ae-4378-8849-529322d8581b-kube-api-access-n7hmn\") pod \"keystone-950d-account-create-n7qrh\" (UID: \"0ab322f8-39ae-4378-8849-529322d8581b\") " pod="openstack/keystone-950d-account-create-n7qrh" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.228011 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7hmn\" (UniqueName: \"kubernetes.io/projected/0ab322f8-39ae-4378-8849-529322d8581b-kube-api-access-n7hmn\") pod \"keystone-950d-account-create-n7qrh\" (UID: \"0ab322f8-39ae-4378-8849-529322d8581b\") " pod="openstack/keystone-950d-account-create-n7qrh" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.268030 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-950d-account-create-n7qrh" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.396406 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.396475 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.488585 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-23d6-account-create-wn7d9"] Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.492647 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-23d6-account-create-wn7d9" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.501645 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.516216 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-23d6-account-create-wn7d9"] Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.604106 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cjqw\" (UniqueName: \"kubernetes.io/projected/896530d1-39a6-4aaf-b8d8-6cd06077f03d-kube-api-access-7cjqw\") pod \"placement-23d6-account-create-wn7d9\" (UID: \"896530d1-39a6-4aaf-b8d8-6cd06077f03d\") " pod="openstack/placement-23d6-account-create-wn7d9" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.654209 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-950d-account-create-n7qrh"] Sep 29 14:02:44 crc kubenswrapper[4634]: W0929 14:02:44.673698 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ab322f8_39ae_4378_8849_529322d8581b.slice/crio-1690b388357df18858ee83908cb7a8f32bf744331cdc42666c26ed1e97d50e6c WatchSource:0}: Error finding container 1690b388357df18858ee83908cb7a8f32bf744331cdc42666c26ed1e97d50e6c: Status 404 returned error can't find the container with id 1690b388357df18858ee83908cb7a8f32bf744331cdc42666c26ed1e97d50e6c Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.681009 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-58a9-account-create-zghfd"] Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.682267 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-58a9-account-create-zghfd" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.683920 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.696157 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-58a9-account-create-zghfd"] Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.707856 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cjqw\" (UniqueName: \"kubernetes.io/projected/896530d1-39a6-4aaf-b8d8-6cd06077f03d-kube-api-access-7cjqw\") pod \"placement-23d6-account-create-wn7d9\" (UID: \"896530d1-39a6-4aaf-b8d8-6cd06077f03d\") " pod="openstack/placement-23d6-account-create-wn7d9" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.730697 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cjqw\" (UniqueName: \"kubernetes.io/projected/896530d1-39a6-4aaf-b8d8-6cd06077f03d-kube-api-access-7cjqw\") pod \"placement-23d6-account-create-wn7d9\" (UID: \"896530d1-39a6-4aaf-b8d8-6cd06077f03d\") " pod="openstack/placement-23d6-account-create-wn7d9" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.809918 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8dnq\" (UniqueName: \"kubernetes.io/projected/52ec169b-8f31-4857-9128-20365f472af1-kube-api-access-l8dnq\") pod \"glance-58a9-account-create-zghfd\" (UID: \"52ec169b-8f31-4857-9128-20365f472af1\") " pod="openstack/glance-58a9-account-create-zghfd" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.846758 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-23d6-account-create-wn7d9" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.912256 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8dnq\" (UniqueName: \"kubernetes.io/projected/52ec169b-8f31-4857-9128-20365f472af1-kube-api-access-l8dnq\") pod \"glance-58a9-account-create-zghfd\" (UID: \"52ec169b-8f31-4857-9128-20365f472af1\") " pod="openstack/glance-58a9-account-create-zghfd" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.936719 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8dnq\" (UniqueName: \"kubernetes.io/projected/52ec169b-8f31-4857-9128-20365f472af1-kube-api-access-l8dnq\") pod \"glance-58a9-account-create-zghfd\" (UID: \"52ec169b-8f31-4857-9128-20365f472af1\") " pod="openstack/glance-58a9-account-create-zghfd" Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.998075 4634 generic.go:334] "Generic (PLEG): container finished" podID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerID="0617af5024010462e3ff97d997662259c347192a23b50e962760fa8e5e1b4604" exitCode=0 Sep 29 14:02:44 crc kubenswrapper[4634]: I0929 14:02:44.998159 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8efec8a2-4905-4ba0-b777-d4e2cd393bd6","Type":"ContainerDied","Data":"0617af5024010462e3ff97d997662259c347192a23b50e962760fa8e5e1b4604"} Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.002647 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-950d-account-create-n7qrh" event={"ID":"0ab322f8-39ae-4378-8849-529322d8581b","Type":"ContainerStarted","Data":"1690b388357df18858ee83908cb7a8f32bf744331cdc42666c26ed1e97d50e6c"} Sep 29 
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.043021 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-lfvq4" podUID="07a47ca0-1cd2-4e8d-92ce-37083cde3744" containerName="ovn-controller" probeResult="failure" output=<
Sep 29 14:02:45 crc kubenswrapper[4634]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Sep 29 14:02:45 crc kubenswrapper[4634]: >
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.048051 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-58a9-account-create-zghfd"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.092155 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-8xcvg"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.109739 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-8xcvg"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.347681 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lfvq4-config-gf7hs"]
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.355311 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.360509 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.403779 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lfvq4-config-gf7hs"]
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.419041 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-23d6-account-create-wn7d9"]
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.432794 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.432858 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-log-ovn\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.432892 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-scripts\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.432915 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7c44\" (UniqueName: \"kubernetes.io/projected/d221b5c7-1165-4965-b7c0-0511dafce830-kube-api-access-d7c44\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.432977 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-additional-scripts\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.433003 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run-ovn\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.548994 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-58a9-account-create-zghfd"]
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.552930 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-log-ovn\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.552993 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-scripts\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.553026 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7c44\" (UniqueName: \"kubernetes.io/projected/d221b5c7-1165-4965-b7c0-0511dafce830-kube-api-access-d7c44\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.553104 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-additional-scripts\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.553138 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run-ovn\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.553217 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.553589 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.554208 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-log-ovn\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.556096 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-scripts\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.558441 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run-ovn\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.563591 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-additional-scripts\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.586032 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7c44\" (UniqueName: \"kubernetes.io/projected/d221b5c7-1165-4965-b7c0-0511dafce830-kube-api-access-d7c44\") pod \"ovn-controller-lfvq4-config-gf7hs\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Sep 29 14:02:45 crc kubenswrapper[4634]: W0929 14:02:45.589390 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52ec169b_8f31_4857_9128_20365f472af1.slice/crio-4c61d5d5e8fb3729dc385108873b3502a5376a12f8d7c21aecf8225d9e7d6269 WatchSource:0}: Error finding container 4c61d5d5e8fb3729dc385108873b3502a5376a12f8d7c21aecf8225d9e7d6269: Status 404 returned error can't find the container with id 4c61d5d5e8fb3729dc385108873b3502a5376a12f8d7c21aecf8225d9e7d6269
Sep 29 14:02:45 crc kubenswrapper[4634]: I0929 14:02:45.711216 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-gf7hs"
Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-gf7hs" Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.019513 4634 generic.go:334] "Generic (PLEG): container finished" podID="896530d1-39a6-4aaf-b8d8-6cd06077f03d" containerID="790cb29278d2c5e47e79cd2404fedfb7e78756e0e2f7da3fc8559e85a356b633" exitCode=0 Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.019945 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-23d6-account-create-wn7d9" event={"ID":"896530d1-39a6-4aaf-b8d8-6cd06077f03d","Type":"ContainerDied","Data":"790cb29278d2c5e47e79cd2404fedfb7e78756e0e2f7da3fc8559e85a356b633"} Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.019987 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-23d6-account-create-wn7d9" event={"ID":"896530d1-39a6-4aaf-b8d8-6cd06077f03d","Type":"ContainerStarted","Data":"23e2858b010ab3e4e073d586ed91ef68e98feb1384b8752556378c56c3f46412"} Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.024864 4634 generic.go:334] "Generic (PLEG): container finished" podID="52ec169b-8f31-4857-9128-20365f472af1" containerID="f356dcedf361bfd1962700067b94718a478c4eb5d2e5a13c5f54fa05427a45c0" exitCode=0 Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.024941 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-58a9-account-create-zghfd" event={"ID":"52ec169b-8f31-4857-9128-20365f472af1","Type":"ContainerDied","Data":"f356dcedf361bfd1962700067b94718a478c4eb5d2e5a13c5f54fa05427a45c0"} Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.024979 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-58a9-account-create-zghfd" event={"ID":"52ec169b-8f31-4857-9128-20365f472af1","Type":"ContainerStarted","Data":"4c61d5d5e8fb3729dc385108873b3502a5376a12f8d7c21aecf8225d9e7d6269"} Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.028609 4634 generic.go:334] "Generic (PLEG): container finished" podID="0ab322f8-39ae-4378-8849-529322d8581b" containerID="72757820eb641bfc36cca0165ef8113f9cf429633cf3e888413b28f73e938bd3" exitCode=0 Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.028676 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-950d-account-create-n7qrh" event={"ID":"0ab322f8-39ae-4378-8849-529322d8581b","Type":"ContainerDied","Data":"72757820eb641bfc36cca0165ef8113f9cf429633cf3e888413b28f73e938bd3"} Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.031404 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8efec8a2-4905-4ba0-b777-d4e2cd393bd6","Type":"ContainerStarted","Data":"6afea5db50bc1e064128175751bff751b672b7f268ac3699275fe23fdbe35322"} Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.032442 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:02:46 crc kubenswrapper[4634]: I0929 14:02:46.127627 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371958.72717 podStartE2EDuration="1m18.127605009s" podCreationTimestamp="2025-09-29 14:01:28 +0000 UTC" firstStartedPulling="2025-09-29 14:01:31.490317826 +0000 UTC m=+1022.059045565" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:02:46.122732317 +0000 UTC m=+1096.691460066" watchObservedRunningTime="2025-09-29 14:02:46.127605009 +0000 UTC m=+1096.696332758" Sep 29 14:02:46 crc 
kubenswrapper[4634]: I0929 14:02:46.271740 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lfvq4-config-gf7hs"] Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.040607 4634 generic.go:334] "Generic (PLEG): container finished" podID="d221b5c7-1165-4965-b7c0-0511dafce830" containerID="e0c764d7d4ed23a6a5c1128184699ea4593dc70f0f9b4134ef1f7d99594ef8a6" exitCode=0 Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.040683 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4-config-gf7hs" event={"ID":"d221b5c7-1165-4965-b7c0-0511dafce830","Type":"ContainerDied","Data":"e0c764d7d4ed23a6a5c1128184699ea4593dc70f0f9b4134ef1f7d99594ef8a6"} Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.041099 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4-config-gf7hs" event={"ID":"d221b5c7-1165-4965-b7c0-0511dafce830","Type":"ContainerStarted","Data":"f410a72f5ac5637b2aea554343841a99cda11022fe5bd68c50b317b72bc2edcf"} Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.493371 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-23d6-account-create-wn7d9" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.597119 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-58a9-account-create-zghfd" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.599373 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cjqw\" (UniqueName: \"kubernetes.io/projected/896530d1-39a6-4aaf-b8d8-6cd06077f03d-kube-api-access-7cjqw\") pod \"896530d1-39a6-4aaf-b8d8-6cd06077f03d\" (UID: \"896530d1-39a6-4aaf-b8d8-6cd06077f03d\") " Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.609748 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-950d-account-create-n7qrh" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.625865 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/896530d1-39a6-4aaf-b8d8-6cd06077f03d-kube-api-access-7cjqw" (OuterVolumeSpecName: "kube-api-access-7cjqw") pod "896530d1-39a6-4aaf-b8d8-6cd06077f03d" (UID: "896530d1-39a6-4aaf-b8d8-6cd06077f03d"). InnerVolumeSpecName "kube-api-access-7cjqw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.701925 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8dnq\" (UniqueName: \"kubernetes.io/projected/52ec169b-8f31-4857-9128-20365f472af1-kube-api-access-l8dnq\") pod \"52ec169b-8f31-4857-9128-20365f472af1\" (UID: \"52ec169b-8f31-4857-9128-20365f472af1\") " Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.701981 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7hmn\" (UniqueName: \"kubernetes.io/projected/0ab322f8-39ae-4378-8849-529322d8581b-kube-api-access-n7hmn\") pod \"0ab322f8-39ae-4378-8849-529322d8581b\" (UID: \"0ab322f8-39ae-4378-8849-529322d8581b\") " Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.702367 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cjqw\" (UniqueName: \"kubernetes.io/projected/896530d1-39a6-4aaf-b8d8-6cd06077f03d-kube-api-access-7cjqw\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.707354 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ab322f8-39ae-4378-8849-529322d8581b-kube-api-access-n7hmn" (OuterVolumeSpecName: "kube-api-access-n7hmn") pod "0ab322f8-39ae-4378-8849-529322d8581b" (UID: "0ab322f8-39ae-4378-8849-529322d8581b"). InnerVolumeSpecName "kube-api-access-n7hmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.707402 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52ec169b-8f31-4857-9128-20365f472af1-kube-api-access-l8dnq" (OuterVolumeSpecName: "kube-api-access-l8dnq") pod "52ec169b-8f31-4857-9128-20365f472af1" (UID: "52ec169b-8f31-4857-9128-20365f472af1"). InnerVolumeSpecName "kube-api-access-l8dnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.818749 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8dnq\" (UniqueName: \"kubernetes.io/projected/52ec169b-8f31-4857-9128-20365f472af1-kube-api-access-l8dnq\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:47 crc kubenswrapper[4634]: I0929 14:02:47.818821 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7hmn\" (UniqueName: \"kubernetes.io/projected/0ab322f8-39ae-4378-8849-529322d8581b-kube-api-access-n7hmn\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.055677 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-950d-account-create-n7qrh" event={"ID":"0ab322f8-39ae-4378-8849-529322d8581b","Type":"ContainerDied","Data":"1690b388357df18858ee83908cb7a8f32bf744331cdc42666c26ed1e97d50e6c"} Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.057280 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1690b388357df18858ee83908cb7a8f32bf744331cdc42666c26ed1e97d50e6c" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.055823 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-950d-account-create-n7qrh" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.059728 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-23d6-account-create-wn7d9" event={"ID":"896530d1-39a6-4aaf-b8d8-6cd06077f03d","Type":"ContainerDied","Data":"23e2858b010ab3e4e073d586ed91ef68e98feb1384b8752556378c56c3f46412"} Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.059785 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23e2858b010ab3e4e073d586ed91ef68e98feb1384b8752556378c56c3f46412" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.059739 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-23d6-account-create-wn7d9" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.062487 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-58a9-account-create-zghfd" event={"ID":"52ec169b-8f31-4857-9128-20365f472af1","Type":"ContainerDied","Data":"4c61d5d5e8fb3729dc385108873b3502a5376a12f8d7c21aecf8225d9e7d6269"} Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.062545 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c61d5d5e8fb3729dc385108873b3502a5376a12f8d7c21aecf8225d9e7d6269" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.062747 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-58a9-account-create-zghfd" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.497652 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-gf7hs" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.650408 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-log-ovn\") pod \"d221b5c7-1165-4965-b7c0-0511dafce830\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.650579 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-scripts\") pod \"d221b5c7-1165-4965-b7c0-0511dafce830\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.650733 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run\") pod \"d221b5c7-1165-4965-b7c0-0511dafce830\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.650754 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run-ovn\") pod \"d221b5c7-1165-4965-b7c0-0511dafce830\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.650851 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7c44\" (UniqueName: \"kubernetes.io/projected/d221b5c7-1165-4965-b7c0-0511dafce830-kube-api-access-d7c44\") pod \"d221b5c7-1165-4965-b7c0-0511dafce830\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.650872 4634 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-additional-scripts\") pod \"d221b5c7-1165-4965-b7c0-0511dafce830\" (UID: \"d221b5c7-1165-4965-b7c0-0511dafce830\") " Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.651130 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run" (OuterVolumeSpecName: "var-run") pod "d221b5c7-1165-4965-b7c0-0511dafce830" (UID: "d221b5c7-1165-4965-b7c0-0511dafce830"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.651230 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d221b5c7-1165-4965-b7c0-0511dafce830" (UID: "d221b5c7-1165-4965-b7c0-0511dafce830"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652271 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d221b5c7-1165-4965-b7c0-0511dafce830" (UID: "d221b5c7-1165-4965-b7c0-0511dafce830"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652564 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-scripts" (OuterVolumeSpecName: "scripts") pod "d221b5c7-1165-4965-b7c0-0511dafce830" (UID: "d221b5c7-1165-4965-b7c0-0511dafce830"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652553 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d221b5c7-1165-4965-b7c0-0511dafce830" (UID: "d221b5c7-1165-4965-b7c0-0511dafce830"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652748 4634 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652767 4634 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652796 4634 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652809 4634 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d221b5c7-1165-4965-b7c0-0511dafce830-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.652818 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d221b5c7-1165-4965-b7c0-0511dafce830-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.665295 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d221b5c7-1165-4965-b7c0-0511dafce830-kube-api-access-d7c44" (OuterVolumeSpecName: "kube-api-access-d7c44") pod "d221b5c7-1165-4965-b7c0-0511dafce830" (UID: "d221b5c7-1165-4965-b7c0-0511dafce830"). InnerVolumeSpecName "kube-api-access-d7c44". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:48 crc kubenswrapper[4634]: I0929 14:02:48.755028 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7c44\" (UniqueName: \"kubernetes.io/projected/d221b5c7-1165-4965-b7c0-0511dafce830-kube-api-access-d7c44\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.079572 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4-config-gf7hs" event={"ID":"d221b5c7-1165-4965-b7c0-0511dafce830","Type":"ContainerDied","Data":"f410a72f5ac5637b2aea554343841a99cda11022fe5bd68c50b317b72bc2edcf"} Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.079623 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f410a72f5ac5637b2aea554343841a99cda11022fe5bd68c50b317b72bc2edcf" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.079788 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-gf7hs" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.266761 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.272744 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/80d6ffb7-ae89-453f-8694-074a86517297-etc-swift\") pod \"swift-storage-0\" (UID: \"80d6ffb7-ae89-453f-8694-074a86517297\") " pod="openstack/swift-storage-0" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.369485 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.641962 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lfvq4-config-gf7hs"] Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.665790 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lfvq4-config-gf7hs"] Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.736667 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-lfvq4-config-6xsq8"] Sep 29 14:02:49 crc kubenswrapper[4634]: E0929 14:02:49.737071 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52ec169b-8f31-4857-9128-20365f472af1" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737105 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="52ec169b-8f31-4857-9128-20365f472af1" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: E0929 14:02:49.737122 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ab322f8-39ae-4378-8849-529322d8581b" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737128 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ab322f8-39ae-4378-8849-529322d8581b" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: E0929 14:02:49.737137 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="896530d1-39a6-4aaf-b8d8-6cd06077f03d" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737144 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="896530d1-39a6-4aaf-b8d8-6cd06077f03d" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: E0929 14:02:49.737156 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d221b5c7-1165-4965-b7c0-0511dafce830" containerName="ovn-config" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737161 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d221b5c7-1165-4965-b7c0-0511dafce830" containerName="ovn-config" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737326 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="52ec169b-8f31-4857-9128-20365f472af1" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737338 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="896530d1-39a6-4aaf-b8d8-6cd06077f03d" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737350 
4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="d221b5c7-1165-4965-b7c0-0511dafce830" containerName="ovn-config" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737357 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ab322f8-39ae-4378-8849-529322d8581b" containerName="mariadb-account-create" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.737930 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.742557 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.761402 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lfvq4-config-6xsq8"] Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.877770 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-additional-scripts\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.878271 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.878331 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run-ovn\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.878361 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvdg5\" (UniqueName: \"kubernetes.io/projected/521e2d50-f7e0-4040-a6b8-1d601ab73282-kube-api-access-tvdg5\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.878387 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-log-ovn\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.878436 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-scripts\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.907327 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-25zsb"] Sep 29 14:02:49 crc 
kubenswrapper[4634]: I0929 14:02:49.908594 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.912113 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.915073 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mdc99" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.921067 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-25zsb"] Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.979627 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-log-ovn\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.979699 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-scripts\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.979750 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-additional-scripts\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.979797 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.979840 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run-ovn\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.979868 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvdg5\" (UniqueName: \"kubernetes.io/projected/521e2d50-f7e0-4040-a6b8-1d601ab73282-kube-api-access-tvdg5\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.980499 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.980538 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" 
(UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-log-ovn\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.980554 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run-ovn\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.981227 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-additional-scripts\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:49 crc kubenswrapper[4634]: I0929 14:02:49.982410 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-scripts\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.019163 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvdg5\" (UniqueName: \"kubernetes.io/projected/521e2d50-f7e0-4040-a6b8-1d601ab73282-kube-api-access-tvdg5\") pod \"ovn-controller-lfvq4-config-6xsq8\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.034725 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-lfvq4" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.095608 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.104771 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-config-data\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.104943 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drl7r\" (UniqueName: \"kubernetes.io/projected/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-kube-api-access-drl7r\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.104995 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-combined-ca-bundle\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.105101 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-db-sync-config-data\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.197399 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d221b5c7-1165-4965-b7c0-0511dafce830" path="/var/lib/kubelet/pods/d221b5c7-1165-4965-b7c0-0511dafce830/volumes" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.198682 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.211564 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drl7r\" (UniqueName: \"kubernetes.io/projected/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-kube-api-access-drl7r\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.211637 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-combined-ca-bundle\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.211698 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-db-sync-config-data\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.211829 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-config-data\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " 
pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.219658 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-db-sync-config-data\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.232827 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-combined-ca-bundle\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.234477 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-config-data\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.249193 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drl7r\" (UniqueName: \"kubernetes.io/projected/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-kube-api-access-drl7r\") pod \"glance-db-sync-25zsb\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.525351 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-25zsb" Sep 29 14:02:50 crc kubenswrapper[4634]: I0929 14:02:50.710556 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-lfvq4-config-6xsq8"] Sep 29 14:02:51 crc kubenswrapper[4634]: I0929 14:02:51.123288 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"cdf03279594378dd4d92e876f7ac59b97cac727cf594553fc1f1300714af206f"} Sep 29 14:02:51 crc kubenswrapper[4634]: I0929 14:02:51.126657 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4-config-6xsq8" event={"ID":"521e2d50-f7e0-4040-a6b8-1d601ab73282","Type":"ContainerStarted","Data":"8af4000d7dc108ffd6f2700ecb07b51b436dcc90d1ae688937e2cd23ef630b24"} Sep 29 14:02:51 crc kubenswrapper[4634]: I0929 14:02:51.208893 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-25zsb"] Sep 29 14:02:51 crc kubenswrapper[4634]: W0929 14:02:51.230078 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43cee9c9_8b49_4b42_a525_ccd9ab1a9730.slice/crio-6894af1c5913d6b7c517560d0b9dcdfb1caebd34666453645570b482ac978b3b WatchSource:0}: Error finding container 6894af1c5913d6b7c517560d0b9dcdfb1caebd34666453645570b482ac978b3b: Status 404 returned error can't find the container with id 6894af1c5913d6b7c517560d0b9dcdfb1caebd34666453645570b482ac978b3b Sep 29 14:02:51 crc kubenswrapper[4634]: I0929 14:02:51.465094 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Sep 29 14:02:52 crc kubenswrapper[4634]: I0929 14:02:52.147326 
4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"c65c7323322dd4e90e67e1aa00b33e8793f08898c9c5452a0be5b7fc2e41a219"} Sep 29 14:02:52 crc kubenswrapper[4634]: I0929 14:02:52.152506 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-25zsb" event={"ID":"43cee9c9-8b49-4b42-a525-ccd9ab1a9730","Type":"ContainerStarted","Data":"6894af1c5913d6b7c517560d0b9dcdfb1caebd34666453645570b482ac978b3b"} Sep 29 14:02:52 crc kubenswrapper[4634]: I0929 14:02:52.155299 4634 generic.go:334] "Generic (PLEG): container finished" podID="521e2d50-f7e0-4040-a6b8-1d601ab73282" containerID="5b474e7fdb952612e113145a450b9833461dac6d9cfe3d0d7b72c8656be5cf77" exitCode=0 Sep 29 14:02:52 crc kubenswrapper[4634]: I0929 14:02:52.155352 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4-config-6xsq8" event={"ID":"521e2d50-f7e0-4040-a6b8-1d601ab73282","Type":"ContainerDied","Data":"5b474e7fdb952612e113145a450b9833461dac6d9cfe3d0d7b72c8656be5cf77"} Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.170543 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"8253e8af7312b54a6633dd7e6da8f98800e72440586a18ec59643efe00355617"} Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.170613 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"847892e6959bd77d1d422b441c9e71bd3faff8e80068634fd1e14fd657dde592"} Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.170630 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"40f5f9a989845bfd14ea259d7293cfb5549d1e7dbc758ab36edc9e77561d8449"} Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.507108 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.590597 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-additional-scripts\") pod \"521e2d50-f7e0-4040-a6b8-1d601ab73282\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.590723 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-scripts\") pod \"521e2d50-f7e0-4040-a6b8-1d601ab73282\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.590771 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-log-ovn\") pod \"521e2d50-f7e0-4040-a6b8-1d601ab73282\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.590836 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvdg5\" (UniqueName: \"kubernetes.io/projected/521e2d50-f7e0-4040-a6b8-1d601ab73282-kube-api-access-tvdg5\") pod \"521e2d50-f7e0-4040-a6b8-1d601ab73282\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.590972 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run-ovn\") pod \"521e2d50-f7e0-4040-a6b8-1d601ab73282\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.591065 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run\") pod \"521e2d50-f7e0-4040-a6b8-1d601ab73282\" (UID: \"521e2d50-f7e0-4040-a6b8-1d601ab73282\") " Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.591236 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "521e2d50-f7e0-4040-a6b8-1d601ab73282" (UID: "521e2d50-f7e0-4040-a6b8-1d601ab73282"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.591328 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "521e2d50-f7e0-4040-a6b8-1d601ab73282" (UID: "521e2d50-f7e0-4040-a6b8-1d601ab73282"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.591362 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run" (OuterVolumeSpecName: "var-run") pod "521e2d50-f7e0-4040-a6b8-1d601ab73282" (UID: "521e2d50-f7e0-4040-a6b8-1d601ab73282"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.591970 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "521e2d50-f7e0-4040-a6b8-1d601ab73282" (UID: "521e2d50-f7e0-4040-a6b8-1d601ab73282"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.592142 4634 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.592163 4634 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.592176 4634 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/521e2d50-f7e0-4040-a6b8-1d601ab73282-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.592619 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-scripts" (OuterVolumeSpecName: "scripts") pod "521e2d50-f7e0-4040-a6b8-1d601ab73282" (UID: "521e2d50-f7e0-4040-a6b8-1d601ab73282"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.601365 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/521e2d50-f7e0-4040-a6b8-1d601ab73282-kube-api-access-tvdg5" (OuterVolumeSpecName: "kube-api-access-tvdg5") pod "521e2d50-f7e0-4040-a6b8-1d601ab73282" (UID: "521e2d50-f7e0-4040-a6b8-1d601ab73282"). InnerVolumeSpecName "kube-api-access-tvdg5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.693939 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvdg5\" (UniqueName: \"kubernetes.io/projected/521e2d50-f7e0-4040-a6b8-1d601ab73282-kube-api-access-tvdg5\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.693982 4634 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:53 crc kubenswrapper[4634]: I0929 14:02:53.693997 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/521e2d50-f7e0-4040-a6b8-1d601ab73282-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:02:54 crc kubenswrapper[4634]: I0929 14:02:54.181975 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"7c34ab8428db1b5c3f86ebe318b7f587cc261e1676313ecce6f984e35e5d8a41"} Sep 29 14:02:54 crc kubenswrapper[4634]: I0929 14:02:54.185436 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-lfvq4-config-6xsq8" event={"ID":"521e2d50-f7e0-4040-a6b8-1d601ab73282","Type":"ContainerDied","Data":"8af4000d7dc108ffd6f2700ecb07b51b436dcc90d1ae688937e2cd23ef630b24"} Sep 29 14:02:54 crc kubenswrapper[4634]: I0929 14:02:54.185467 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8af4000d7dc108ffd6f2700ecb07b51b436dcc90d1ae688937e2cd23ef630b24" Sep 29 14:02:54 crc kubenswrapper[4634]: I0929 14:02:54.185522 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-lfvq4-config-6xsq8" Sep 29 14:02:54 crc kubenswrapper[4634]: I0929 14:02:54.639645 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-lfvq4-config-6xsq8"] Sep 29 14:02:54 crc kubenswrapper[4634]: I0929 14:02:54.646306 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-lfvq4-config-6xsq8"] Sep 29 14:02:55 crc kubenswrapper[4634]: I0929 14:02:55.201721 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"84b980ddd32d60eb0c27bc26912d8413262f77ae6f309b9bd6876b431734dfff"} Sep 29 14:02:55 crc kubenswrapper[4634]: I0929 14:02:55.202237 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"f12a513716a04596641735b0e84fbf38dadee7e8044a0db989b7a0a6be10a671"} Sep 29 14:02:55 crc kubenswrapper[4634]: I0929 14:02:55.202250 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"03a920c0736ccb43cd820441f057b694b9a7a1e966a6b3b6ce2b3b892f1ed6da"} Sep 29 14:02:56 crc kubenswrapper[4634]: I0929 14:02:56.123802 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="521e2d50-f7e0-4040-a6b8-1d601ab73282" path="/var/lib/kubelet/pods/521e2d50-f7e0-4040-a6b8-1d601ab73282/volumes" Sep 29 14:02:57 crc kubenswrapper[4634]: I0929 14:02:57.235826 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"1b9ee9071eaf93e462e8dc8704e0a9de37c7c88ddc37ec0f86cbb9edcf24ac42"} Sep 29 14:02:57 crc kubenswrapper[4634]: I0929 14:02:57.236327 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"833c76c0fb896368b0bb0b96297c62e6c8616589c2180b0904d0dcf01b014a7b"} Sep 29 14:02:57 crc kubenswrapper[4634]: I0929 14:02:57.236340 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"fd8f6b681128f2794aca54e4947d117bebfdd2bef597a341d1ed1d3fccd90658"} Sep 29 14:02:58 crc kubenswrapper[4634]: I0929 14:02:58.253792 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"34de202081bad26a7555360945739ac2061e3bfdec8d2d28b5c4a098eb94776c"} Sep 29 14:03:00 crc kubenswrapper[4634]: I0929 14:03:00.705772 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:03:01 crc kubenswrapper[4634]: I0929 14:03:01.473130 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.064990 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-pkjb5"] Sep 29 14:03:04 crc kubenswrapper[4634]: E0929 14:03:04.065428 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521e2d50-f7e0-4040-a6b8-1d601ab73282" containerName="ovn-config" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.065446 4634 
state_mem.go:107] "Deleted CPUSet assignment" podUID="521e2d50-f7e0-4040-a6b8-1d601ab73282" containerName="ovn-config" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.065616 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="521e2d50-f7e0-4040-a6b8-1d601ab73282" containerName="ovn-config" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.066264 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-pkjb5" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.142872 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-pkjb5"] Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.172200 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvtdz\" (UniqueName: \"kubernetes.io/projected/4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8-kube-api-access-bvtdz\") pod \"cinder-db-create-pkjb5\" (UID: \"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8\") " pod="openstack/cinder-db-create-pkjb5" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.221323 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-jcggm"] Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.223934 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jcggm" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.293777 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvtdz\" (UniqueName: \"kubernetes.io/projected/4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8-kube-api-access-bvtdz\") pod \"cinder-db-create-pkjb5\" (UID: \"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8\") " pod="openstack/cinder-db-create-pkjb5" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.294146 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jcggm"] Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.345313 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvtdz\" (UniqueName: \"kubernetes.io/projected/4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8-kube-api-access-bvtdz\") pod \"cinder-db-create-pkjb5\" (UID: \"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8\") " pod="openstack/cinder-db-create-pkjb5" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.350322 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-9gvtl"] Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.351768 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-9gvtl" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.373933 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-9gvtl"] Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.388418 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-pkjb5" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.397008 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsvgs\" (UniqueName: \"kubernetes.io/projected/73d87cd9-cadf-4579-b3e2-a2534568d559-kube-api-access-wsvgs\") pod \"barbican-db-create-jcggm\" (UID: \"73d87cd9-cadf-4579-b3e2-a2534568d559\") " pod="openstack/barbican-db-create-jcggm" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.478969 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-bwwg8"] Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.480046 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.483951 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.484243 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-l544j" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.484421 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.484460 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.499899 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-bwwg8"] Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.501016 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6z9h\" (UniqueName: \"kubernetes.io/projected/b539ef0d-f88f-485d-853d-1f340eedb31c-kube-api-access-q6z9h\") pod \"neutron-db-create-9gvtl\" (UID: \"b539ef0d-f88f-485d-853d-1f340eedb31c\") " pod="openstack/neutron-db-create-9gvtl" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.501332 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsvgs\" (UniqueName: \"kubernetes.io/projected/73d87cd9-cadf-4579-b3e2-a2534568d559-kube-api-access-wsvgs\") pod \"barbican-db-create-jcggm\" (UID: \"73d87cd9-cadf-4579-b3e2-a2534568d559\") " pod="openstack/barbican-db-create-jcggm" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.565233 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsvgs\" (UniqueName: \"kubernetes.io/projected/73d87cd9-cadf-4579-b3e2-a2534568d559-kube-api-access-wsvgs\") pod \"barbican-db-create-jcggm\" (UID: \"73d87cd9-cadf-4579-b3e2-a2534568d559\") " pod="openstack/barbican-db-create-jcggm" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.565926 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-jcggm" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.603402 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-combined-ca-bundle\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.603491 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-config-data\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.603723 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5kh5\" (UniqueName: \"kubernetes.io/projected/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-kube-api-access-s5kh5\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.604031 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6z9h\" (UniqueName: \"kubernetes.io/projected/b539ef0d-f88f-485d-853d-1f340eedb31c-kube-api-access-q6z9h\") pod \"neutron-db-create-9gvtl\" (UID: \"b539ef0d-f88f-485d-853d-1f340eedb31c\") " pod="openstack/neutron-db-create-9gvtl" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.625900 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6z9h\" (UniqueName: \"kubernetes.io/projected/b539ef0d-f88f-485d-853d-1f340eedb31c-kube-api-access-q6z9h\") pod \"neutron-db-create-9gvtl\" (UID: \"b539ef0d-f88f-485d-853d-1f340eedb31c\") " pod="openstack/neutron-db-create-9gvtl" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.703897 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-9gvtl" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.705609 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-combined-ca-bundle\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.705788 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-config-data\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.706462 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5kh5\" (UniqueName: \"kubernetes.io/projected/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-kube-api-access-s5kh5\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.712014 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-config-data\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.727505 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-combined-ca-bundle\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.729750 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5kh5\" (UniqueName: \"kubernetes.io/projected/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-kube-api-access-s5kh5\") pod \"keystone-db-sync-bwwg8\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:04 crc kubenswrapper[4634]: I0929 14:03:04.799521 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:07 crc kubenswrapper[4634]: E0929 14:03:07.467057 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Sep 29 14:03:07 crc kubenswrapper[4634]: E0929 14:03:07.468417 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-drl7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-25zsb_openstack(43cee9c9-8b49-4b42-a525-ccd9ab1a9730): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:03:07 crc kubenswrapper[4634]: E0929 14:03:07.469691 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-25zsb" podUID="43cee9c9-8b49-4b42-a525-ccd9ab1a9730" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.395059 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"3962028b160972818717ef02bf5b49ec72c9990755f039442d18da962368f4e1"} Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.396053 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"6b8a0ffd8f3715580e77a0938a5bea5fd4d5c73398ace4af3a446226ba1a7bcb"} Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.396098 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"80d6ffb7-ae89-453f-8694-074a86517297","Type":"ContainerStarted","Data":"9e4fb275df0ae2eec32e6c4806ad0074f87ddb1ef5779291f01995233a2d4231"} Sep 29 14:03:08 crc kubenswrapper[4634]: E0929 14:03:08.397782 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-25zsb" podUID="43cee9c9-8b49-4b42-a525-ccd9ab1a9730" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.439962 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=46.456460417 podStartE2EDuration="52.439937432s" podCreationTimestamp="2025-09-29 14:02:16 +0000 UTC" firstStartedPulling="2025-09-29 14:02:50.1908183 +0000 UTC m=+1100.759546049" lastFinishedPulling="2025-09-29 14:02:56.174295315 +0000 UTC m=+1106.743023064" observedRunningTime="2025-09-29 14:03:08.431024148 +0000 UTC m=+1118.999751897" watchObservedRunningTime="2025-09-29 14:03:08.439937432 +0000 UTC m=+1119.008665181" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.619054 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-9gvtl"] Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.631140 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-pkjb5"] Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.651747 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-bwwg8"] Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.672019 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-jcggm"] Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.862477 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-w88gx"] Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.864169 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.871706 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-w88gx"] Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.926292 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.926337 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.926365 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.926390 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.926424 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fpmr\" (UniqueName: \"kubernetes.io/projected/ab8abb49-8275-41ed-b2b9-1bff56c5790c-kube-api-access-2fpmr\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.926464 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-config\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:08 crc kubenswrapper[4634]: I0929 14:03:08.926774 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.028370 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.029312 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: 
\"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.029316 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.029468 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.029521 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.029575 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fpmr\" (UniqueName: \"kubernetes.io/projected/ab8abb49-8275-41ed-b2b9-1bff56c5790c-kube-api-access-2fpmr\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.029643 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-config\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.032252 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-config\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.032651 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.032996 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.033596 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc 
kubenswrapper[4634]: I0929 14:03:09.050839 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fpmr\" (UniqueName: \"kubernetes.io/projected/ab8abb49-8275-41ed-b2b9-1bff56c5790c-kube-api-access-2fpmr\") pod \"dnsmasq-dns-77585f5f8c-w88gx\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.272777 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.408018 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bwwg8" event={"ID":"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3","Type":"ContainerStarted","Data":"8ee1309c8df607905ecb7a5fcef1156fbf1f54efb52bd29a64ec72b71e8bb788"} Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.409459 4634 generic.go:334] "Generic (PLEG): container finished" podID="73d87cd9-cadf-4579-b3e2-a2534568d559" containerID="7ebb0c1c2f2a2aecd1e5964b99c3d5fa9b72bc819bec1584c801135bc771f7fb" exitCode=0 Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.409520 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jcggm" event={"ID":"73d87cd9-cadf-4579-b3e2-a2534568d559","Type":"ContainerDied","Data":"7ebb0c1c2f2a2aecd1e5964b99c3d5fa9b72bc819bec1584c801135bc771f7fb"} Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.409539 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jcggm" event={"ID":"73d87cd9-cadf-4579-b3e2-a2534568d559","Type":"ContainerStarted","Data":"866de339ba6472c073417a1e9a7893316387ab058303cc4bb569ebe0f83f5b37"} Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.412013 4634 generic.go:334] "Generic (PLEG): container finished" podID="4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8" containerID="93e8e4d824f24622ca6409aafd5fb701c7187189c086884a1328c10d2439f3f5" exitCode=0 Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.412113 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-pkjb5" event={"ID":"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8","Type":"ContainerDied","Data":"93e8e4d824f24622ca6409aafd5fb701c7187189c086884a1328c10d2439f3f5"} Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.412153 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-pkjb5" event={"ID":"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8","Type":"ContainerStarted","Data":"8af07818cfc76263b4172bc8ed4ecb44a5c8edbfd166a7aff6cb3b2a6bb00c05"} Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.413445 4634 generic.go:334] "Generic (PLEG): container finished" podID="b539ef0d-f88f-485d-853d-1f340eedb31c" containerID="b1c71828c67b5f19e9593d4e690bc3a64235594808963e2d181d6ba42a069b08" exitCode=0 Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.413557 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9gvtl" event={"ID":"b539ef0d-f88f-485d-853d-1f340eedb31c","Type":"ContainerDied","Data":"b1c71828c67b5f19e9593d4e690bc3a64235594808963e2d181d6ba42a069b08"} Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.413575 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9gvtl" event={"ID":"b539ef0d-f88f-485d-853d-1f340eedb31c","Type":"ContainerStarted","Data":"9e15a7788b4e462fce09f23f6c55113b29f5326cb8433c23a18487ffffbe7e52"} Sep 29 14:03:09 crc kubenswrapper[4634]: I0929 14:03:09.779038 4634 
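The paired "Generic (PLEG): container finished" and "SyncLoop (PLEG): event for pod" lines come from the pod lifecycle event generator: it periodically relists container states from the runtime, diffs the snapshot against the previous one, and emits ContainerStarted/ContainerDied events into the sync loop. A minimal sketch of that diffing idea, with illustrative types (not kubelet's actual PLEG implementation):

    package main

    import "fmt"

    type state string // "running" or "exited", simplified

    type event struct {
        id   string
        kind string
    }

    // diff compares two relist snapshots and emits lifecycle events,
    // the way the log pairs ContainerStarted/ContainerDied per relist.
    func diff(old, cur map[string]state) []event {
        var evs []event
        for id, s := range cur {
            switch {
            case old[id] != "running" && s == "running":
                evs = append(evs, event{id, "ContainerStarted"})
            case old[id] == "running" && s == "exited":
                evs = append(evs, event{id, "ContainerDied"})
            }
        }
        return evs
    }

    func main() {
        prev := map[string]state{"7ebb0c1c": "running"}
        curr := map[string]state{"7ebb0c1c": "exited"} // db-create job finished, exitCode=0
        fmt.Println(diff(prev, curr))
    }

For the short-lived db-create jobs above, a single relist can observe both the start and the exit, which is why ContainerDied and ContainerStarted for the same container arrive in the same batch.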
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-w88gx"] Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.425844 4634 generic.go:334] "Generic (PLEG): container finished" podID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerID="11b6ffaf90198e70594a39598993c1923da047313ee98868ee82c0109cd05fea" exitCode=0 Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.427167 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" event={"ID":"ab8abb49-8275-41ed-b2b9-1bff56c5790c","Type":"ContainerDied","Data":"11b6ffaf90198e70594a39598993c1923da047313ee98868ee82c0109cd05fea"} Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.427205 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" event={"ID":"ab8abb49-8275-41ed-b2b9-1bff56c5790c","Type":"ContainerStarted","Data":"855e968948ffc9ae92764ea7b7eee47e1e45fa7212157271700127154ba9211c"} Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.825740 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-pkjb5" Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.864557 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-9gvtl" Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.889178 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvtdz\" (UniqueName: \"kubernetes.io/projected/4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8-kube-api-access-bvtdz\") pod \"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8\" (UID: \"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8\") " Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.889226 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6z9h\" (UniqueName: \"kubernetes.io/projected/b539ef0d-f88f-485d-853d-1f340eedb31c-kube-api-access-q6z9h\") pod \"b539ef0d-f88f-485d-853d-1f340eedb31c\" (UID: \"b539ef0d-f88f-485d-853d-1f340eedb31c\") " Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.889402 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jcggm" Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.894131 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8-kube-api-access-bvtdz" (OuterVolumeSpecName: "kube-api-access-bvtdz") pod "4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8" (UID: "4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8"). InnerVolumeSpecName "kube-api-access-bvtdz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.895672 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b539ef0d-f88f-485d-853d-1f340eedb31c-kube-api-access-q6z9h" (OuterVolumeSpecName: "kube-api-access-q6z9h") pod "b539ef0d-f88f-485d-853d-1f340eedb31c" (UID: "b539ef0d-f88f-485d-853d-1f340eedb31c"). InnerVolumeSpecName "kube-api-access-q6z9h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.990560 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsvgs\" (UniqueName: \"kubernetes.io/projected/73d87cd9-cadf-4579-b3e2-a2534568d559-kube-api-access-wsvgs\") pod \"73d87cd9-cadf-4579-b3e2-a2534568d559\" (UID: \"73d87cd9-cadf-4579-b3e2-a2534568d559\") " Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.991395 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6z9h\" (UniqueName: \"kubernetes.io/projected/b539ef0d-f88f-485d-853d-1f340eedb31c-kube-api-access-q6z9h\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.991418 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvtdz\" (UniqueName: \"kubernetes.io/projected/4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8-kube-api-access-bvtdz\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:10 crc kubenswrapper[4634]: I0929 14:03:10.994652 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73d87cd9-cadf-4579-b3e2-a2534568d559-kube-api-access-wsvgs" (OuterVolumeSpecName: "kube-api-access-wsvgs") pod "73d87cd9-cadf-4579-b3e2-a2534568d559" (UID: "73d87cd9-cadf-4579-b3e2-a2534568d559"). InnerVolumeSpecName "kube-api-access-wsvgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.092321 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsvgs\" (UniqueName: \"kubernetes.io/projected/73d87cd9-cadf-4579-b3e2-a2534568d559-kube-api-access-wsvgs\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.444369 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-jcggm" event={"ID":"73d87cd9-cadf-4579-b3e2-a2534568d559","Type":"ContainerDied","Data":"866de339ba6472c073417a1e9a7893316387ab058303cc4bb569ebe0f83f5b37"} Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.444449 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="866de339ba6472c073417a1e9a7893316387ab058303cc4bb569ebe0f83f5b37" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.444548 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-jcggm" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.447544 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" event={"ID":"ab8abb49-8275-41ed-b2b9-1bff56c5790c","Type":"ContainerStarted","Data":"ed079008aeefce5858d982d3d33a2d5b455bd2a86cbe956c8de628b65a9b354a"} Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.447809 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.450789 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-pkjb5" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.450800 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-pkjb5" event={"ID":"4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8","Type":"ContainerDied","Data":"8af07818cfc76263b4172bc8ed4ecb44a5c8edbfd166a7aff6cb3b2a6bb00c05"} Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.450827 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8af07818cfc76263b4172bc8ed4ecb44a5c8edbfd166a7aff6cb3b2a6bb00c05" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.461651 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9gvtl" event={"ID":"b539ef0d-f88f-485d-853d-1f340eedb31c","Type":"ContainerDied","Data":"9e15a7788b4e462fce09f23f6c55113b29f5326cb8433c23a18487ffffbe7e52"} Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.461683 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e15a7788b4e462fce09f23f6c55113b29f5326cb8433c23a18487ffffbe7e52" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.461704 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-9gvtl" Sep 29 14:03:11 crc kubenswrapper[4634]: I0929 14:03:11.482189 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" podStartSLOduration=3.482158548 podStartE2EDuration="3.482158548s" podCreationTimestamp="2025-09-29 14:03:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:11.475944958 +0000 UTC m=+1122.044672707" watchObservedRunningTime="2025-09-29 14:03:11.482158548 +0000 UTC m=+1122.050886337" Sep 29 14:03:14 crc kubenswrapper[4634]: I0929 14:03:14.396156 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:03:14 crc kubenswrapper[4634]: I0929 14:03:14.397029 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:03:16 crc kubenswrapper[4634]: I0929 14:03:16.507543 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bwwg8" event={"ID":"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3","Type":"ContainerStarted","Data":"0cd7ea42be56e61373ba79946e83c00c514c6213ecfc42d0f1beac026e1f0818"} Sep 29 14:03:16 crc kubenswrapper[4634]: I0929 14:03:16.533571 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-bwwg8" podStartSLOduration=5.442914444 podStartE2EDuration="12.533547046s" podCreationTimestamp="2025-09-29 14:03:04 +0000 UTC" firstStartedPulling="2025-09-29 14:03:08.672991936 +0000 UTC m=+1119.241719685" lastFinishedPulling="2025-09-29 14:03:15.763624538 +0000 UTC m=+1126.332352287" observedRunningTime="2025-09-29 14:03:16.532020475 +0000 UTC m=+1127.100748264" watchObservedRunningTime="2025-09-29 14:03:16.533547046 +0000 UTC m=+1127.102274805" Sep 29 
Sep 29 14:03:19 crc kubenswrapper[4634]: I0929 14:03:19.358569 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-f8sjj"]
Sep 29 14:03:19 crc kubenswrapper[4634]: I0929 14:03:19.359069 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-f8sjj" podUID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerName="dnsmasq-dns" containerID="cri-o://912fdde47aa8722999cd978b7ed27d2c40e10e85d16345668086c4ed9e3d7915" gracePeriod=10
Sep 29 14:03:19 crc kubenswrapper[4634]: I0929 14:03:19.540653 4634 generic.go:334] "Generic (PLEG): container finished" podID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerID="912fdde47aa8722999cd978b7ed27d2c40e10e85d16345668086c4ed9e3d7915" exitCode=0
Sep 29 14:03:19 crc kubenswrapper[4634]: I0929 14:03:19.540749 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-f8sjj" event={"ID":"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab","Type":"ContainerDied","Data":"912fdde47aa8722999cd978b7ed27d2c40e10e85d16345668086c4ed9e3d7915"}
Sep 29 14:03:19 crc kubenswrapper[4634]: I0929 14:03:19.545850 4634 generic.go:334] "Generic (PLEG): container finished" podID="1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" containerID="0cd7ea42be56e61373ba79946e83c00c514c6213ecfc42d0f1beac026e1f0818" exitCode=0
Sep 29 14:03:19 crc kubenswrapper[4634]: I0929 14:03:19.545900 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bwwg8" event={"ID":"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3","Type":"ContainerDied","Data":"0cd7ea42be56e61373ba79946e83c00c514c6213ecfc42d0f1beac026e1f0818"}
Sep 29 14:03:19 crc kubenswrapper[4634]: I0929 14:03:19.963999 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-f8sjj"
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.098641 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-nb\") pod \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") "
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.098905 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnfw4\" (UniqueName: \"kubernetes.io/projected/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-kube-api-access-mnfw4\") pod \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") "
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.099027 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-dns-svc\") pod \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") "
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.099063 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-config\") pod \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") "
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.099900 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-sb\") pod \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\" (UID: \"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab\") "
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.109557 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-kube-api-access-mnfw4" (OuterVolumeSpecName: "kube-api-access-mnfw4") pod "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" (UID: "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab"). InnerVolumeSpecName "kube-api-access-mnfw4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.175468 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" (UID: "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.180410 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-config" (OuterVolumeSpecName: "config") pod "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" (UID: "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.181459 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" (UID: "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.204391 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.204470 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnfw4\" (UniqueName: \"kubernetes.io/projected/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-kube-api-access-mnfw4\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.204491 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.204536 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-config\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.224263 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" (UID: "f2a0ccf1-a36f-4573-8b3c-59cf92e096ab"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.306588 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.557822 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-f8sjj"
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-f8sjj" Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.557871 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-f8sjj" event={"ID":"f2a0ccf1-a36f-4573-8b3c-59cf92e096ab","Type":"ContainerDied","Data":"78e5a9659ec0bfe40ab9d183cc8e240372463eaffa8922533b25b1cd735e538c"} Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.558832 4634 scope.go:117] "RemoveContainer" containerID="912fdde47aa8722999cd978b7ed27d2c40e10e85d16345668086c4ed9e3d7915" Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.560923 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-25zsb" event={"ID":"43cee9c9-8b49-4b42-a525-ccd9ab1a9730","Type":"ContainerStarted","Data":"0ab2e3ac4ec4ec2bb0e10bd7f8c433d0a587dc58fb1b8544e95a404a5a38cc04"} Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.595587 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-25zsb" podStartSLOduration=3.012208474 podStartE2EDuration="31.595563454s" podCreationTimestamp="2025-09-29 14:02:49 +0000 UTC" firstStartedPulling="2025-09-29 14:02:51.232625353 +0000 UTC m=+1101.801353102" lastFinishedPulling="2025-09-29 14:03:19.815980303 +0000 UTC m=+1130.384708082" observedRunningTime="2025-09-29 14:03:20.583015481 +0000 UTC m=+1131.151743250" watchObservedRunningTime="2025-09-29 14:03:20.595563454 +0000 UTC m=+1131.164291213" Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.612777 4634 scope.go:117] "RemoveContainer" containerID="852983e6cc94c98ca2822eaae81a17239150bce641d539329ef81c5676d73649" Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.617789 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-f8sjj"] Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.623629 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-f8sjj"] Sep 29 14:03:20 crc kubenswrapper[4634]: I0929 14:03:20.901147 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.024816 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-combined-ca-bundle\") pod \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.025076 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-config-data\") pod \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.025170 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5kh5\" (UniqueName: \"kubernetes.io/projected/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-kube-api-access-s5kh5\") pod \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\" (UID: \"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3\") " Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.048508 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-kube-api-access-s5kh5" (OuterVolumeSpecName: "kube-api-access-s5kh5") pod "1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" (UID: "1536d3a4-ce3c-429a-a2ff-0a50e1db22d3"). InnerVolumeSpecName "kube-api-access-s5kh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.060425 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" (UID: "1536d3a4-ce3c-429a-a2ff-0a50e1db22d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.113832 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-config-data" (OuterVolumeSpecName: "config-data") pod "1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" (UID: "1536d3a4-ce3c-429a-a2ff-0a50e1db22d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.128966 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.129003 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5kh5\" (UniqueName: \"kubernetes.io/projected/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-kube-api-access-s5kh5\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.129018 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.577629 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-bwwg8" event={"ID":"1536d3a4-ce3c-429a-a2ff-0a50e1db22d3","Type":"ContainerDied","Data":"8ee1309c8df607905ecb7a5fcef1156fbf1f54efb52bd29a64ec72b71e8bb788"} Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.578138 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ee1309c8df607905ecb7a5fcef1156fbf1f54efb52bd29a64ec72b71e8bb788" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.577716 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-bwwg8" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.927498 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-rgcgg"] Sep 29 14:03:21 crc kubenswrapper[4634]: E0929 14:03:21.927946 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerName="dnsmasq-dns" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.927966 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerName="dnsmasq-dns" Sep 29 14:03:21 crc kubenswrapper[4634]: E0929 14:03:21.927993 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" containerName="keystone-db-sync" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928000 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" containerName="keystone-db-sync" Sep 29 14:03:21 crc kubenswrapper[4634]: E0929 14:03:21.928020 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerName="init" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928027 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerName="init" Sep 29 14:03:21 crc kubenswrapper[4634]: E0929 14:03:21.928036 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8" containerName="mariadb-database-create" Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928042 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8" containerName="mariadb-database-create" Sep 29 14:03:21 crc kubenswrapper[4634]: E0929 14:03:21.928056 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b539ef0d-f88f-485d-853d-1f340eedb31c" containerName="mariadb-database-create" Sep 29 14:03:21 crc 
Sep 29 14:03:21 crc kubenswrapper[4634]: E0929 14:03:21.928110 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73d87cd9-cadf-4579-b3e2-a2534568d559" containerName="mariadb-database-create"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928117 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="73d87cd9-cadf-4579-b3e2-a2534568d559" containerName="mariadb-database-create"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928301 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" containerName="keystone-db-sync"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928323 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="73d87cd9-cadf-4579-b3e2-a2534568d559" containerName="mariadb-database-create"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928341 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b539ef0d-f88f-485d-853d-1f340eedb31c" containerName="mariadb-database-create"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928352 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8" containerName="mariadb-database-create"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.928371 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" containerName="dnsmasq-dns"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.929340 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.959793 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vp95h"]
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.962922 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.968607 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.974053 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-l544j"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.974339 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.977892 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-rgcgg"]
Sep 29 14:03:21 crc kubenswrapper[4634]: I0929 14:03:21.984407 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.029136 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vp95h"]
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.053910 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-config-data\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054022 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054067 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p74dm\" (UniqueName: \"kubernetes.io/projected/4cfe29ba-9318-4725-bd8d-771a6f1360c0-kube-api-access-p74dm\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054125 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-credential-keys\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054150 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054173 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-scripts\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054194 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054291 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85c2x\" (UniqueName: \"kubernetes.io/projected/483d069a-57ad-49fd-80a4-d16456e7e894-kube-api-access-85c2x\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054353 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-config\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054417 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-fernet-keys\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054506 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-combined-ca-bundle\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.054592 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-svc\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.136756 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2a0ccf1-a36f-4573-8b3c-59cf92e096ab" path="/var/lib/kubelet/pods/f2a0ccf1-a36f-4573-8b3c-59cf92e096ab/volumes"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160148 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160216 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p74dm\" (UniqueName: \"kubernetes.io/projected/4cfe29ba-9318-4725-bd8d-771a6f1360c0-kube-api-access-p74dm\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160253 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-credential-keys\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h"
\"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-credential-keys\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160276 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160297 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-scripts\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160319 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160349 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85c2x\" (UniqueName: \"kubernetes.io/projected/483d069a-57ad-49fd-80a4-d16456e7e894-kube-api-access-85c2x\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160378 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-config\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160410 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-fernet-keys\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160445 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-combined-ca-bundle\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160478 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-svc\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.160509 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-config-data\") pod \"keystone-bootstrap-vp95h\" (UID: 
\"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.164586 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-nb\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.183854 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-sb\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.188735 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-fernet-keys\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.193006 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-config\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.196829 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-credential-keys\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.197721 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-combined-ca-bundle\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.201843 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-svc\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.210042 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-scripts\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.216928 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-swift-storage-0\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.234427 4634 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-config-data\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.273689 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p74dm\" (UniqueName: \"kubernetes.io/projected/4cfe29ba-9318-4725-bd8d-771a6f1360c0-kube-api-access-p74dm\") pod \"keystone-bootstrap-vp95h\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.286647 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85c2x\" (UniqueName: \"kubernetes.io/projected/483d069a-57ad-49fd-80a4-d16456e7e894-kube-api-access-85c2x\") pod \"dnsmasq-dns-55fff446b9-rgcgg\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.303864 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.539401 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-59f849cb9-zxspc"] Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.541022 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.580810 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.589941 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.590236 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.590358 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-gvkpx" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.590790 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.591850 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.598705 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.624829 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.640217 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687163 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmk5t\" (UniqueName: \"kubernetes.io/projected/f52e3024-e154-435d-892c-86a1b3344fb5-kube-api-access-mmk5t\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687238 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-log-httpd\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687283 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f52e3024-e154-435d-892c-86a1b3344fb5-horizon-secret-key\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687302 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687327 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csfmr\" (UniqueName: \"kubernetes.io/projected/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-kube-api-access-csfmr\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687347 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-run-httpd\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687366 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-config-data\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687386 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-scripts\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687411 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687439 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-scripts\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687459 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-config-data\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.687478 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f52e3024-e154-435d-892c-86a1b3344fb5-logs\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.732220 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-rgcgg"] Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.779262 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-59f849cb9-zxspc"] Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.821067 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831450 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-config-data\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831542 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f52e3024-e154-435d-892c-86a1b3344fb5-logs\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831678 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmk5t\" (UniqueName: \"kubernetes.io/projected/f52e3024-e154-435d-892c-86a1b3344fb5-kube-api-access-mmk5t\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831798 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-log-httpd\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831900 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/f52e3024-e154-435d-892c-86a1b3344fb5-horizon-secret-key\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831922 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831954 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csfmr\" (UniqueName: \"kubernetes.io/projected/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-kube-api-access-csfmr\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.831977 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-run-httpd\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.832007 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-config-data\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.832033 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-scripts\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.832098 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.832154 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-scripts\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.836417 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f52e3024-e154-435d-892c-86a1b3344fb5-logs\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.836827 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-log-httpd\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.837801 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-run-httpd\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.840930 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-config-data\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.841745 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-scripts\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.862498 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.863369 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.884051 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-config-data\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.884462 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f52e3024-e154-435d-892c-86a1b3344fb5-horizon-secret-key\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.888465 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-scripts\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.905482 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csfmr\" (UniqueName: \"kubernetes.io/projected/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-kube-api-access-csfmr\") pod \"ceilometer-0\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " pod="openstack/ceilometer-0" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.914142 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-577dn"] Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.915400 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmk5t\" (UniqueName: \"kubernetes.io/projected/f52e3024-e154-435d-892c-86a1b3344fb5-kube-api-access-mmk5t\") pod \"horizon-59f849cb9-zxspc\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " 
pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.916627 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-577dn" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.920257 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.926837 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qw9np" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.927016 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.952930 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-zjh78"] Sep 29 14:03:22 crc kubenswrapper[4634]: I0929 14:03:22.956716 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.023482 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.040728 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-scripts\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.040820 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-logs\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.040856 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-config-data\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.040872 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-combined-ca-bundle\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.040928 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6pvg\" (UniqueName: \"kubernetes.io/projected/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-kube-api-access-l6pvg\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.079720 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-577dn"] Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.137031 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-zjh78"] Sep 29 14:03:23 crc 
kubenswrapper[4634]: I0929 14:03:23.144376 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-config-data\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144422 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-combined-ca-bundle\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144465 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144533 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6pvg\" (UniqueName: \"kubernetes.io/projected/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-kube-api-access-l6pvg\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144557 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144579 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-scripts\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144620 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mflb5\" (UniqueName: \"kubernetes.io/projected/c74c2608-d914-4498-a016-603a32c1fd5c-kube-api-access-mflb5\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144654 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144678 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-logs\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144704 
4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-config\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.144721 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.151276 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-logs\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.153517 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-combined-ca-bundle\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.160519 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-scripts\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.162271 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-config-data\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.170041 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.195654 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6pvg\" (UniqueName: \"kubernetes.io/projected/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-kube-api-access-l6pvg\") pod \"placement-db-sync-577dn\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.196157 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-69db6765d5-mgkh7"] Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.197897 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.221858 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69db6765d5-mgkh7"] Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.262013 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.262214 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mflb5\" (UniqueName: \"kubernetes.io/projected/c74c2608-d914-4498-a016-603a32c1fd5c-kube-api-access-mflb5\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.262317 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.263380 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-nb\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.262425 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.263461 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-config\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.263533 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.264123 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-sb\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.264726 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-swift-storage-0\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.264731 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-svc\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.264852 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-config\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.287919 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mflb5\" (UniqueName: \"kubernetes.io/projected/c74c2608-d914-4498-a016-603a32c1fd5c-kube-api-access-mflb5\") pod \"dnsmasq-dns-76fcf4b695-zjh78\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.320785 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-577dn" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.366731 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-scripts\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.366842 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-config-data\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.366924 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-horizon-secret-key\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.367022 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d88hj\" (UniqueName: \"kubernetes.io/projected/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-kube-api-access-d88hj\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.367067 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-logs\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 
14:03:23.386352 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.473479 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d88hj\" (UniqueName: \"kubernetes.io/projected/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-kube-api-access-d88hj\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.473526 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-logs\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.473570 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-scripts\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.473595 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-config-data\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.473685 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-horizon-secret-key\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.474640 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-logs\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.475613 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-scripts\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.476714 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-config-data\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.521373 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d88hj\" (UniqueName: \"kubernetes.io/projected/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-kube-api-access-d88hj\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.522583 4634 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-horizon-secret-key\") pod \"horizon-69db6765d5-mgkh7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.533172 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.703702 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-rgcgg"] Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.728825 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vp95h"] Sep 29 14:03:23 crc kubenswrapper[4634]: I0929 14:03:23.778298 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:03:23 crc kubenswrapper[4634]: W0929 14:03:23.814028 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod061c94cb_cc6c_4a14_a0c4_4bcef38173b7.slice/crio-cba87749d56a9bef2541cb5db565769ce28f4f1e0b05a4b8a954a62216223c33 WatchSource:0}: Error finding container cba87749d56a9bef2541cb5db565769ce28f4f1e0b05a4b8a954a62216223c33: Status 404 returned error can't find the container with id cba87749d56a9bef2541cb5db565769ce28f4f1e0b05a4b8a954a62216223c33 Sep 29 14:03:24 crc kubenswrapper[4634]: W0929 14:03:24.141752 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf52e3024_e154_435d_892c_86a1b3344fb5.slice/crio-a5201a48af67c032cd81b152f3307c7fdd125ab4629fe55b2e78b378ef42f9eb WatchSource:0}: Error finding container a5201a48af67c032cd81b152f3307c7fdd125ab4629fe55b2e78b378ef42f9eb: Status 404 returned error can't find the container with id a5201a48af67c032cd81b152f3307c7fdd125ab4629fe55b2e78b378ef42f9eb Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.141907 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-59f849cb9-zxspc"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.142166 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-acca-account-create-nstwb"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.143323 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-acca-account-create-nstwb" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.150380 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.171853 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-acca-account-create-nstwb"] Sep 29 14:03:24 crc kubenswrapper[4634]: W0929 14:03:24.227559 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c1084d0_17b1_40a1_b57e_11e41ad8db3b.slice/crio-0cdd329c87c0cd98dbd1e4e99b8f26f6452ec5b88fe120e9a499eb2092713447 WatchSource:0}: Error finding container 0cdd329c87c0cd98dbd1e4e99b8f26f6452ec5b88fe120e9a499eb2092713447: Status 404 returned error can't find the container with id 0cdd329c87c0cd98dbd1e4e99b8f26f6452ec5b88fe120e9a499eb2092713447 Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.233237 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96nfl\" (UniqueName: \"kubernetes.io/projected/f608b599-da98-42fc-be9c-54554cf111a3-kube-api-access-96nfl\") pod \"cinder-acca-account-create-nstwb\" (UID: \"f608b599-da98-42fc-be9c-54554cf111a3\") " pod="openstack/cinder-acca-account-create-nstwb" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.235807 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-577dn"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.247681 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-zjh78"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.323392 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b8d7-account-create-mwmmq"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.325006 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b8d7-account-create-mwmmq" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.329170 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.338611 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96nfl\" (UniqueName: \"kubernetes.io/projected/f608b599-da98-42fc-be9c-54554cf111a3-kube-api-access-96nfl\") pod \"cinder-acca-account-create-nstwb\" (UID: \"f608b599-da98-42fc-be9c-54554cf111a3\") " pod="openstack/cinder-acca-account-create-nstwb" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.367942 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b8d7-account-create-mwmmq"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.399048 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96nfl\" (UniqueName: \"kubernetes.io/projected/f608b599-da98-42fc-be9c-54554cf111a3-kube-api-access-96nfl\") pod \"cinder-acca-account-create-nstwb\" (UID: \"f608b599-da98-42fc-be9c-54554cf111a3\") " pod="openstack/cinder-acca-account-create-nstwb" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.422424 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0479-account-create-qsvzr"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.437286 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0479-account-create-qsvzr" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.439406 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.440675 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfgxs\" (UniqueName: \"kubernetes.io/projected/0e21f408-bd65-4b12-8f48-dce2914e71ea-kube-api-access-bfgxs\") pod \"neutron-b8d7-account-create-mwmmq\" (UID: \"0e21f408-bd65-4b12-8f48-dce2914e71ea\") " pod="openstack/neutron-b8d7-account-create-mwmmq" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.457003 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0479-account-create-qsvzr"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.481723 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-acca-account-create-nstwb" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.487789 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-69db6765d5-mgkh7"] Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.543898 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw4sv\" (UniqueName: \"kubernetes.io/projected/53636ebe-a711-43c9-bad0-192536a09823-kube-api-access-rw4sv\") pod \"barbican-0479-account-create-qsvzr\" (UID: \"53636ebe-a711-43c9-bad0-192536a09823\") " pod="openstack/barbican-0479-account-create-qsvzr" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.544052 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfgxs\" (UniqueName: \"kubernetes.io/projected/0e21f408-bd65-4b12-8f48-dce2914e71ea-kube-api-access-bfgxs\") pod \"neutron-b8d7-account-create-mwmmq\" (UID: \"0e21f408-bd65-4b12-8f48-dce2914e71ea\") " pod="openstack/neutron-b8d7-account-create-mwmmq" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.564708 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfgxs\" (UniqueName: \"kubernetes.io/projected/0e21f408-bd65-4b12-8f48-dce2914e71ea-kube-api-access-bfgxs\") pod \"neutron-b8d7-account-create-mwmmq\" (UID: \"0e21f408-bd65-4b12-8f48-dce2914e71ea\") " pod="openstack/neutron-b8d7-account-create-mwmmq" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.650336 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw4sv\" (UniqueName: \"kubernetes.io/projected/53636ebe-a711-43c9-bad0-192536a09823-kube-api-access-rw4sv\") pod \"barbican-0479-account-create-qsvzr\" (UID: \"53636ebe-a711-43c9-bad0-192536a09823\") " pod="openstack/barbican-0479-account-create-qsvzr" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.672602 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b8d7-account-create-mwmmq" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.715661 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-577dn" event={"ID":"1c1084d0-17b1-40a1-b57e-11e41ad8db3b","Type":"ContainerStarted","Data":"0cdd329c87c0cd98dbd1e4e99b8f26f6452ec5b88fe120e9a499eb2092713447"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.722667 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"061c94cb-cc6c-4a14-a0c4-4bcef38173b7","Type":"ContainerStarted","Data":"cba87749d56a9bef2541cb5db565769ce28f4f1e0b05a4b8a954a62216223c33"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.723945 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw4sv\" (UniqueName: \"kubernetes.io/projected/53636ebe-a711-43c9-bad0-192536a09823-kube-api-access-rw4sv\") pod \"barbican-0479-account-create-qsvzr\" (UID: \"53636ebe-a711-43c9-bad0-192536a09823\") " pod="openstack/barbican-0479-account-create-qsvzr" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.726827 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59f849cb9-zxspc" event={"ID":"f52e3024-e154-435d-892c-86a1b3344fb5","Type":"ContainerStarted","Data":"a5201a48af67c032cd81b152f3307c7fdd125ab4629fe55b2e78b378ef42f9eb"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.729816 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" event={"ID":"c74c2608-d914-4498-a016-603a32c1fd5c","Type":"ContainerStarted","Data":"48ecd851a5c185e9692cebc382112fe286b137775a78b6dd17347155956e3bb4"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.763890 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vp95h" event={"ID":"4cfe29ba-9318-4725-bd8d-771a6f1360c0","Type":"ContainerStarted","Data":"7232fd46d12e34755e56d2296a450516e7e42381703498145e70cb5444166dcd"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.763938 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vp95h" event={"ID":"4cfe29ba-9318-4725-bd8d-771a6f1360c0","Type":"ContainerStarted","Data":"573a475d69387b1ddaf8c7aa7f26b387f24f406f59120108c46d982d68144816"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.782111 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0479-account-create-qsvzr" Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.789784 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69db6765d5-mgkh7" event={"ID":"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7","Type":"ContainerStarted","Data":"10bf616c93e0256d325bb544b2341fc731a86fb4531dfca93151c811b91106da"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.804821 4634 generic.go:334] "Generic (PLEG): container finished" podID="483d069a-57ad-49fd-80a4-d16456e7e894" containerID="8103cca02c7fc320947ab42174666a0d078ce5337715b94c428bcdd040f2c17d" exitCode=0 Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.806322 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" event={"ID":"483d069a-57ad-49fd-80a4-d16456e7e894","Type":"ContainerDied","Data":"8103cca02c7fc320947ab42174666a0d078ce5337715b94c428bcdd040f2c17d"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.806436 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" event={"ID":"483d069a-57ad-49fd-80a4-d16456e7e894","Type":"ContainerStarted","Data":"e70052955bc7654d43177a276ad3daee62a9cff923bea5d98ac762f9c094f221"} Sep 29 14:03:24 crc kubenswrapper[4634]: I0929 14:03:24.830608 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vp95h" podStartSLOduration=3.830585617 podStartE2EDuration="3.830585617s" podCreationTimestamp="2025-09-29 14:03:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:24.794925142 +0000 UTC m=+1135.363652891" watchObservedRunningTime="2025-09-29 14:03:24.830585617 +0000 UTC m=+1135.399313356" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.051341 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-acca-account-create-nstwb"] Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.429210 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-59f849cb9-zxspc"] Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.430126 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.477839 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-66cbbc7c87-vxsv2"] Sep 29 14:03:25 crc kubenswrapper[4634]: E0929 14:03:25.478307 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="483d069a-57ad-49fd-80a4-d16456e7e894" containerName="init" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.478320 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="483d069a-57ad-49fd-80a4-d16456e7e894" containerName="init" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.478537 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="483d069a-57ad-49fd-80a4-d16456e7e894" containerName="init" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.477861 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-swift-storage-0\") pod \"483d069a-57ad-49fd-80a4-d16456e7e894\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.479193 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-svc\") pod \"483d069a-57ad-49fd-80a4-d16456e7e894\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.479246 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-nb\") pod \"483d069a-57ad-49fd-80a4-d16456e7e894\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.479285 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85c2x\" (UniqueName: \"kubernetes.io/projected/483d069a-57ad-49fd-80a4-d16456e7e894-kube-api-access-85c2x\") pod \"483d069a-57ad-49fd-80a4-d16456e7e894\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.479382 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-sb\") pod \"483d069a-57ad-49fd-80a4-d16456e7e894\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.479504 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-config\") pod \"483d069a-57ad-49fd-80a4-d16456e7e894\" (UID: \"483d069a-57ad-49fd-80a4-d16456e7e894\") " Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.479525 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.512912 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/483d069a-57ad-49fd-80a4-d16456e7e894-kube-api-access-85c2x" (OuterVolumeSpecName: "kube-api-access-85c2x") pod "483d069a-57ad-49fd-80a4-d16456e7e894" (UID: "483d069a-57ad-49fd-80a4-d16456e7e894"). InnerVolumeSpecName "kube-api-access-85c2x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.545441 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66cbbc7c87-vxsv2"] Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.553772 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "483d069a-57ad-49fd-80a4-d16456e7e894" (UID: "483d069a-57ad-49fd-80a4-d16456e7e894"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.604314 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "483d069a-57ad-49fd-80a4-d16456e7e894" (UID: "483d069a-57ad-49fd-80a4-d16456e7e894"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.604629 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-config" (OuterVolumeSpecName: "config") pod "483d069a-57ad-49fd-80a4-d16456e7e894" (UID: "483d069a-57ad-49fd-80a4-d16456e7e894"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.607441 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-config-data\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.612243 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46c94b2-04bd-4cc3-a706-daa89146532f-horizon-secret-key\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.612782 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46c94b2-04bd-4cc3-a706-daa89146532f-logs\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.612984 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97p75\" (UniqueName: \"kubernetes.io/projected/d46c94b2-04bd-4cc3-a706-daa89146532f-kube-api-access-97p75\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.621382 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-scripts\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.621910 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85c2x\" (UniqueName: \"kubernetes.io/projected/483d069a-57ad-49fd-80a4-d16456e7e894-kube-api-access-85c2x\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.621922 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.621934 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-config\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.624809 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "483d069a-57ad-49fd-80a4-d16456e7e894" (UID: "483d069a-57ad-49fd-80a4-d16456e7e894"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.626304 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b8d7-account-create-mwmmq"]
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.631010 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "483d069a-57ad-49fd-80a4-d16456e7e894" (UID: "483d069a-57ad-49fd-80a4-d16456e7e894"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:03:25 crc kubenswrapper[4634]: W0929 14:03:25.639156 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e21f408_bd65_4b12_8f48_dce2914e71ea.slice/crio-9eac178296c6ae35e3b02c1aa753df817c4fb148723dd0b96b71279c6c846a8a WatchSource:0}: Error finding container 9eac178296c6ae35e3b02c1aa753df817c4fb148723dd0b96b71279c6c846a8a: Status 404 returned error can't find the container with id 9eac178296c6ae35e3b02c1aa753df817c4fb148723dd0b96b71279c6c846a8a
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.723314 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46c94b2-04bd-4cc3-a706-daa89146532f-horizon-secret-key\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2"
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.723402 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46c94b2-04bd-4cc3-a706-daa89146532f-logs\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2"
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.723461 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97p75\" (UniqueName: \"kubernetes.io/projected/d46c94b2-04bd-4cc3-a706-daa89146532f-kube-api-access-97p75\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2"
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.723498 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-scripts\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2"
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.723602 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-config-data\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2"
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.723653 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.723665 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/483d069a-57ad-49fd-80a4-d16456e7e894-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.724905 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46c94b2-04bd-4cc3-a706-daa89146532f-logs\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2"
Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.726266 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-scripts\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2"
\"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-scripts\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.726632 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-config-data\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.746880 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46c94b2-04bd-4cc3-a706-daa89146532f-horizon-secret-key\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.751962 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0479-account-create-qsvzr"] Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.774719 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97p75\" (UniqueName: \"kubernetes.io/projected/d46c94b2-04bd-4cc3-a706-daa89146532f-kube-api-access-97p75\") pod \"horizon-66cbbc7c87-vxsv2\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.785209 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.842068 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8d7-account-create-mwmmq" event={"ID":"0e21f408-bd65-4b12-8f48-dce2914e71ea","Type":"ContainerStarted","Data":"9eac178296c6ae35e3b02c1aa753df817c4fb148723dd0b96b71279c6c846a8a"} Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.844503 4634 generic.go:334] "Generic (PLEG): container finished" podID="c74c2608-d914-4498-a016-603a32c1fd5c" containerID="bc362c12268e249114c4ab48be51147c1fa9af11fc2cd60a591c3ec307543480" exitCode=0 Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.844575 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" event={"ID":"c74c2608-d914-4498-a016-603a32c1fd5c","Type":"ContainerDied","Data":"bc362c12268e249114c4ab48be51147c1fa9af11fc2cd60a591c3ec307543480"} Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.860837 4634 generic.go:334] "Generic (PLEG): container finished" podID="f608b599-da98-42fc-be9c-54554cf111a3" containerID="6ac1ab9378ea3d79c5cd67906f9795bfb68650cbdfb5b1237559e60fbba1ee9e" exitCode=0 Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.860916 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-acca-account-create-nstwb" event={"ID":"f608b599-da98-42fc-be9c-54554cf111a3","Type":"ContainerDied","Data":"6ac1ab9378ea3d79c5cd67906f9795bfb68650cbdfb5b1237559e60fbba1ee9e"} Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.860952 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-acca-account-create-nstwb" event={"ID":"f608b599-da98-42fc-be9c-54554cf111a3","Type":"ContainerStarted","Data":"72d6ef819bab789b2830b05cbc6e04bfe4d91983e93efb0d36231afc7a28154f"} Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.879760 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.881590 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55fff446b9-rgcgg" event={"ID":"483d069a-57ad-49fd-80a4-d16456e7e894","Type":"ContainerDied","Data":"e70052955bc7654d43177a276ad3daee62a9cff923bea5d98ac762f9c094f221"} Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.881657 4634 scope.go:117] "RemoveContainer" containerID="8103cca02c7fc320947ab42174666a0d078ce5337715b94c428bcdd040f2c17d" Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.990100 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-rgcgg"] Sep 29 14:03:25 crc kubenswrapper[4634]: I0929 14:03:25.996573 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55fff446b9-rgcgg"] Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.016291 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.126672 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="483d069a-57ad-49fd-80a4-d16456e7e894" path="/var/lib/kubelet/pods/483d069a-57ad-49fd-80a4-d16456e7e894/volumes" Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.665212 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66cbbc7c87-vxsv2"] Sep 29 14:03:26 crc kubenswrapper[4634]: W0929 14:03:26.685563 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd46c94b2_04bd_4cc3_a706_daa89146532f.slice/crio-2af78e93c2e5078f93c5e9253c0d1d7c1bcd9f174c419204f83870dde132fe61 WatchSource:0}: Error finding container 2af78e93c2e5078f93c5e9253c0d1d7c1bcd9f174c419204f83870dde132fe61: Status 404 returned error can't find the container with id 2af78e93c2e5078f93c5e9253c0d1d7c1bcd9f174c419204f83870dde132fe61 Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.897647 4634 generic.go:334] "Generic (PLEG): container finished" podID="0e21f408-bd65-4b12-8f48-dce2914e71ea" containerID="d730ead07dad9c63ea9062c8d087b4d20d4fb00b74390ea490db7d292159f9e4" exitCode=0 Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.897763 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8d7-account-create-mwmmq" event={"ID":"0e21f408-bd65-4b12-8f48-dce2914e71ea","Type":"ContainerDied","Data":"d730ead07dad9c63ea9062c8d087b4d20d4fb00b74390ea490db7d292159f9e4"} Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.902192 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" event={"ID":"c74c2608-d914-4498-a016-603a32c1fd5c","Type":"ContainerStarted","Data":"b0e50e76221f9aeae9ee0a28d05474748ce117d08c0f703a288f3e0f46c63610"} Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.903352 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.912754 4634 generic.go:334] "Generic (PLEG): container finished" podID="53636ebe-a711-43c9-bad0-192536a09823" containerID="adc5419551dd448ee50d381c71870cd1922e3728a99bdf7930bb6220c24d17ec" exitCode=0 Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.912851 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0479-account-create-qsvzr" 
event={"ID":"53636ebe-a711-43c9-bad0-192536a09823","Type":"ContainerDied","Data":"adc5419551dd448ee50d381c71870cd1922e3728a99bdf7930bb6220c24d17ec"} Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.912889 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0479-account-create-qsvzr" event={"ID":"53636ebe-a711-43c9-bad0-192536a09823","Type":"ContainerStarted","Data":"7056a84dc1bff1c9479af285442c43f1d40179f7059e873c51125a0ef0baf42a"} Sep 29 14:03:26 crc kubenswrapper[4634]: I0929 14:03:26.928527 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66cbbc7c87-vxsv2" event={"ID":"d46c94b2-04bd-4cc3-a706-daa89146532f","Type":"ContainerStarted","Data":"2af78e93c2e5078f93c5e9253c0d1d7c1bcd9f174c419204f83870dde132fe61"} Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.337657 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-acca-account-create-nstwb" Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.365649 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96nfl\" (UniqueName: \"kubernetes.io/projected/f608b599-da98-42fc-be9c-54554cf111a3-kube-api-access-96nfl\") pod \"f608b599-da98-42fc-be9c-54554cf111a3\" (UID: \"f608b599-da98-42fc-be9c-54554cf111a3\") " Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.365937 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" podStartSLOduration=5.365905259 podStartE2EDuration="5.365905259s" podCreationTimestamp="2025-09-29 14:03:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:26.99209571 +0000 UTC m=+1137.560823459" watchObservedRunningTime="2025-09-29 14:03:27.365905259 +0000 UTC m=+1137.934633008" Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.398383 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f608b599-da98-42fc-be9c-54554cf111a3-kube-api-access-96nfl" (OuterVolumeSpecName: "kube-api-access-96nfl") pod "f608b599-da98-42fc-be9c-54554cf111a3" (UID: "f608b599-da98-42fc-be9c-54554cf111a3"). InnerVolumeSpecName "kube-api-access-96nfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.468140 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96nfl\" (UniqueName: \"kubernetes.io/projected/f608b599-da98-42fc-be9c-54554cf111a3-kube-api-access-96nfl\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.941304 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-acca-account-create-nstwb" Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.950752 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-acca-account-create-nstwb" event={"ID":"f608b599-da98-42fc-be9c-54554cf111a3","Type":"ContainerDied","Data":"72d6ef819bab789b2830b05cbc6e04bfe4d91983e93efb0d36231afc7a28154f"} Sep 29 14:03:27 crc kubenswrapper[4634]: I0929 14:03:27.950792 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72d6ef819bab789b2830b05cbc6e04bfe4d91983e93efb0d36231afc7a28154f" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.438749 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b8d7-account-create-mwmmq" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.591489 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfgxs\" (UniqueName: \"kubernetes.io/projected/0e21f408-bd65-4b12-8f48-dce2914e71ea-kube-api-access-bfgxs\") pod \"0e21f408-bd65-4b12-8f48-dce2914e71ea\" (UID: \"0e21f408-bd65-4b12-8f48-dce2914e71ea\") " Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.654002 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e21f408-bd65-4b12-8f48-dce2914e71ea-kube-api-access-bfgxs" (OuterVolumeSpecName: "kube-api-access-bfgxs") pod "0e21f408-bd65-4b12-8f48-dce2914e71ea" (UID: "0e21f408-bd65-4b12-8f48-dce2914e71ea"). InnerVolumeSpecName "kube-api-access-bfgxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.681398 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0479-account-create-qsvzr" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.695262 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfgxs\" (UniqueName: \"kubernetes.io/projected/0e21f408-bd65-4b12-8f48-dce2914e71ea-kube-api-access-bfgxs\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.797451 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw4sv\" (UniqueName: \"kubernetes.io/projected/53636ebe-a711-43c9-bad0-192536a09823-kube-api-access-rw4sv\") pod \"53636ebe-a711-43c9-bad0-192536a09823\" (UID: \"53636ebe-a711-43c9-bad0-192536a09823\") " Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.801347 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53636ebe-a711-43c9-bad0-192536a09823-kube-api-access-rw4sv" (OuterVolumeSpecName: "kube-api-access-rw4sv") pod "53636ebe-a711-43c9-bad0-192536a09823" (UID: "53636ebe-a711-43c9-bad0-192536a09823"). InnerVolumeSpecName "kube-api-access-rw4sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.900061 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw4sv\" (UniqueName: \"kubernetes.io/projected/53636ebe-a711-43c9-bad0-192536a09823-kube-api-access-rw4sv\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.963177 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b8d7-account-create-mwmmq" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.963227 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b8d7-account-create-mwmmq" event={"ID":"0e21f408-bd65-4b12-8f48-dce2914e71ea","Type":"ContainerDied","Data":"9eac178296c6ae35e3b02c1aa753df817c4fb148723dd0b96b71279c6c846a8a"} Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.963266 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9eac178296c6ae35e3b02c1aa753df817c4fb148723dd0b96b71279c6c846a8a" Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.967067 4634 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.967393 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0479-account-create-qsvzr" event={"ID":"53636ebe-a711-43c9-bad0-192536a09823","Type":"ContainerDied","Data":"7056a84dc1bff1c9479af285442c43f1d40179f7059e873c51125a0ef0baf42a"}
Sep 29 14:03:28 crc kubenswrapper[4634]: I0929 14:03:28.967485 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7056a84dc1bff1c9479af285442c43f1d40179f7059e873c51125a0ef0baf42a"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.318039 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-dsvdn"]
Sep 29 14:03:29 crc kubenswrapper[4634]: E0929 14:03:29.318744 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f608b599-da98-42fc-be9c-54554cf111a3" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.318759 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="f608b599-da98-42fc-be9c-54554cf111a3" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: E0929 14:03:29.318784 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e21f408-bd65-4b12-8f48-dce2914e71ea" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.318791 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e21f408-bd65-4b12-8f48-dce2914e71ea" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: E0929 14:03:29.318820 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53636ebe-a711-43c9-bad0-192536a09823" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.318826 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="53636ebe-a711-43c9-bad0-192536a09823" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.322530 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="53636ebe-a711-43c9-bad0-192536a09823" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.322576 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="f608b599-da98-42fc-be9c-54554cf111a3" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.322589 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e21f408-bd65-4b12-8f48-dce2914e71ea" containerName="mariadb-account-create"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.323465 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.326598 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-mxxp8"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.326784 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.334638 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.343395 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dsvdn"]
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.411110 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-db-sync-config-data\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.411779 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-combined-ca-bundle\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.411809 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-config-data\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.411874 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5tsd\" (UniqueName: \"kubernetes.io/projected/519a22f3-5513-430a-bd2b-6670ece06c2d-kube-api-access-k5tsd\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.411923 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-scripts\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.412013 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/519a22f3-5513-430a-bd2b-6670ece06c2d-etc-machine-id\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.514207 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-combined-ca-bundle\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.514289 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-config-data\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn"
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-config-data\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.514333 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5tsd\" (UniqueName: \"kubernetes.io/projected/519a22f3-5513-430a-bd2b-6670ece06c2d-kube-api-access-k5tsd\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.514354 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-scripts\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.514398 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/519a22f3-5513-430a-bd2b-6670ece06c2d-etc-machine-id\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.514437 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-db-sync-config-data\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.516158 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/519a22f3-5513-430a-bd2b-6670ece06c2d-etc-machine-id\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.522689 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-scripts\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.523130 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-db-sync-config-data\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.524613 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-config-data\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.535392 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-combined-ca-bundle\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " 
pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.537780 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5tsd\" (UniqueName: \"kubernetes.io/projected/519a22f3-5513-430a-bd2b-6670ece06c2d-kube-api-access-k5tsd\") pod \"cinder-db-sync-dsvdn\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:29 crc kubenswrapper[4634]: I0929 14:03:29.726655 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:03:30 crc kubenswrapper[4634]: I0929 14:03:30.986475 4634 generic.go:334] "Generic (PLEG): container finished" podID="4cfe29ba-9318-4725-bd8d-771a6f1360c0" containerID="7232fd46d12e34755e56d2296a450516e7e42381703498145e70cb5444166dcd" exitCode=0 Sep 29 14:03:30 crc kubenswrapper[4634]: I0929 14:03:30.986698 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vp95h" event={"ID":"4cfe29ba-9318-4725-bd8d-771a6f1360c0","Type":"ContainerDied","Data":"7232fd46d12e34755e56d2296a450516e7e42381703498145e70cb5444166dcd"} Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.180357 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69db6765d5-mgkh7"] Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.237348 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7bbc59f76-j4rjv"] Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.242453 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.253391 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.269211 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7bbc59f76-j4rjv"] Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.358049 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nvgb\" (UniqueName: \"kubernetes.io/projected/1427174f-e673-4c3e-bf36-f1463327fd61-kube-api-access-8nvgb\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.358199 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-scripts\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.358249 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-config-data\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.358278 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-tls-certs\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " 
pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.358453 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1427174f-e673-4c3e-bf36-f1463327fd61-logs\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.358498 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-secret-key\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.358523 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-combined-ca-bundle\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.365503 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-66cbbc7c87-vxsv2"] Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.400318 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5d5866c49b-9tt6g"] Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.402004 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.430847 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d5866c49b-9tt6g"] Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460232 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-horizon-secret-key\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460285 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24cc4bfc-123a-479d-afb7-ca6b62cd7754-logs\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460311 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-horizon-tls-certs\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460335 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24cc4bfc-123a-479d-afb7-ca6b62cd7754-config-data\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460368 4634 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1427174f-e673-4c3e-bf36-f1463327fd61-logs\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460511 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-secret-key\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460578 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzw5q\" (UniqueName: \"kubernetes.io/projected/24cc4bfc-123a-479d-afb7-ca6b62cd7754-kube-api-access-hzw5q\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460612 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-combined-ca-bundle\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460649 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nvgb\" (UniqueName: \"kubernetes.io/projected/1427174f-e673-4c3e-bf36-f1463327fd61-kube-api-access-8nvgb\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460746 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1427174f-e673-4c3e-bf36-f1463327fd61-logs\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460768 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-scripts\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460849 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-config-data\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460882 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-combined-ca-bundle\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460912 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-tls-certs\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.460932 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24cc4bfc-123a-479d-afb7-ca6b62cd7754-scripts\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.462355 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-scripts\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.462536 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-config-data\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.468997 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-secret-key\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.469757 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-tls-certs\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.473862 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-combined-ca-bundle\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.507191 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nvgb\" (UniqueName: \"kubernetes.io/projected/1427174f-e673-4c3e-bf36-f1463327fd61-kube-api-access-8nvgb\") pod \"horizon-7bbc59f76-j4rjv\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.562848 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-combined-ca-bundle\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.562906 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24cc4bfc-123a-479d-afb7-ca6b62cd7754-scripts\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " 
pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.562950 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-horizon-secret-key\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.562978 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24cc4bfc-123a-479d-afb7-ca6b62cd7754-logs\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.562998 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-horizon-tls-certs\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.563024 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24cc4bfc-123a-479d-afb7-ca6b62cd7754-config-data\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.563101 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzw5q\" (UniqueName: \"kubernetes.io/projected/24cc4bfc-123a-479d-afb7-ca6b62cd7754-kube-api-access-hzw5q\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.563807 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/24cc4bfc-123a-479d-afb7-ca6b62cd7754-logs\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.563862 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24cc4bfc-123a-479d-afb7-ca6b62cd7754-scripts\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.565794 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24cc4bfc-123a-479d-afb7-ca6b62cd7754-config-data\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.568982 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-combined-ca-bundle\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.569309 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-horizon-secret-key\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.577452 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/24cc4bfc-123a-479d-afb7-ca6b62cd7754-horizon-tls-certs\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.582350 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzw5q\" (UniqueName: \"kubernetes.io/projected/24cc4bfc-123a-479d-afb7-ca6b62cd7754-kube-api-access-hzw5q\") pod \"horizon-5d5866c49b-9tt6g\" (UID: \"24cc4bfc-123a-479d-afb7-ca6b62cd7754\") " pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.595095 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:03:31 crc kubenswrapper[4634]: I0929 14:03:31.725524 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:03:33 crc kubenswrapper[4634]: I0929 14:03:33.362071 4634 scope.go:117] "RemoveContainer" containerID="38052a34c8ca1fe31485ad0f14e5006123770bb7dd52a67968e7daba0309328f" Sep 29 14:03:33 crc kubenswrapper[4634]: I0929 14:03:33.390107 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:03:33 crc kubenswrapper[4634]: I0929 14:03:33.466307 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-w88gx"] Sep 29 14:03:33 crc kubenswrapper[4634]: I0929 14:03:33.469063 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="dnsmasq-dns" containerID="cri-o://ed079008aeefce5858d982d3d33a2d5b455bd2a86cbe956c8de628b65a9b354a" gracePeriod=10 Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.052858 4634 generic.go:334] "Generic (PLEG): container finished" podID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerID="ed079008aeefce5858d982d3d33a2d5b455bd2a86cbe956c8de628b65a9b354a" exitCode=0 Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.052940 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" event={"ID":"ab8abb49-8275-41ed-b2b9-1bff56c5790c","Type":"ContainerDied","Data":"ed079008aeefce5858d982d3d33a2d5b455bd2a86cbe956c8de628b65a9b354a"} Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.273992 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.746211 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-bqjnz"] Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.750649 4634 util.go:30] "No sandbox for pod can be found. 
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.754556 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-75qvh"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.755036 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.755490 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.773831 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bqjnz"]
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.829700 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-config\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.829787 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9twx\" (UniqueName: \"kubernetes.io/projected/aac1358d-d39f-4732-97fa-cc8947c81bdb-kube-api-access-w9twx\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.830159 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-combined-ca-bundle\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.933100 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9twx\" (UniqueName: \"kubernetes.io/projected/aac1358d-d39f-4732-97fa-cc8947c81bdb-kube-api-access-w9twx\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.933177 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-combined-ca-bundle\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.933268 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-config\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.941952 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-combined-ca-bundle\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.945115 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-config\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:34 crc kubenswrapper[4634]: I0929 14:03:34.956615 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9twx\" (UniqueName: \"kubernetes.io/projected/aac1358d-d39f-4732-97fa-cc8947c81bdb-kube-api-access-w9twx\") pod \"neutron-db-sync-bqjnz\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:35 crc kubenswrapper[4634]: I0929 14:03:35.078277 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bqjnz"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.369176 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-6927h"]
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.370624 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6927h"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.373407 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-brpbz"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.373460 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.392519 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6927h"]
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.464632 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7hbf\" (UniqueName: \"kubernetes.io/projected/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-kube-api-access-v7hbf\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.464685 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-db-sync-config-data\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.464743 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-combined-ca-bundle\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.566199 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7hbf\" (UniqueName: \"kubernetes.io/projected/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-kube-api-access-v7hbf\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h"
Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.566608 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-db-sync-config-data\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h"
pod="openstack/barbican-db-sync-6927h" Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.566689 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-combined-ca-bundle\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h" Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.580854 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-db-sync-config-data\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h" Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.591198 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-combined-ca-bundle\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h" Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.595906 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7hbf\" (UniqueName: \"kubernetes.io/projected/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-kube-api-access-v7hbf\") pod \"barbican-db-sync-6927h\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " pod="openstack/barbican-db-sync-6927h" Sep 29 14:03:36 crc kubenswrapper[4634]: I0929 14:03:36.700337 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6927h" Sep 29 14:03:39 crc kubenswrapper[4634]: I0929 14:03:39.273976 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: connect: connection refused" Sep 29 14:03:44 crc kubenswrapper[4634]: E0929 14:03:44.050625 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 14:03:44 crc kubenswrapper[4634]: E0929 14:03:44.051569 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68fh5b8h5ch694h5f5h5fch54bh686h66dh64dh598h64bhb9h56ch676hd5h548h5f8h57bh65fh597h554hf4h59ch78hf9h674h5b5h5cbh598h586h78q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-97p75,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-66cbbc7c87-vxsv2_openstack(d46c94b2-04bd-4cc3-a706-daa89146532f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:03:44 crc kubenswrapper[4634]: E0929 14:03:44.053956 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-66cbbc7c87-vxsv2" podUID="d46c94b2-04bd-4cc3-a706-daa89146532f" Sep 29 14:03:44 crc kubenswrapper[4634]: I0929 14:03:44.396194 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:03:44 crc kubenswrapper[4634]: I0929 14:03:44.396608 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:03:44 crc kubenswrapper[4634]: I0929 14:03:44.396666 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:03:44 crc kubenswrapper[4634]: I0929 14:03:44.397469 4634 kuberuntime_manager.go:1027] "Message for Container of 
pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"57b4d47644425468a03fbc283811a82747ba711e9f6742c5de405a2bc380e087"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:03:44 crc kubenswrapper[4634]: I0929 14:03:44.397517 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://57b4d47644425468a03fbc283811a82747ba711e9f6742c5de405a2bc380e087" gracePeriod=600 Sep 29 14:03:44 crc kubenswrapper[4634]: E0929 14:03:44.977537 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Sep 29 14:03:44 crc kubenswrapper[4634]: E0929 14:03:44.977797 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n567h599h669h5f9h54dh58ch5d8h5b7h696h667hf5h645h59ch68ch665h7bhcdh5fh75h5bbh55ch588h57bh5bfh55ch6bh58fh68hd8h574h686h54fq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-csfmr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ceilometer-0_openstack(061c94cb-cc6c-4a14-a0c4-4bcef38173b7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.069730 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.197366 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="57b4d47644425468a03fbc283811a82747ba711e9f6742c5de405a2bc380e087" exitCode=0 Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.197897 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"57b4d47644425468a03fbc283811a82747ba711e9f6742c5de405a2bc380e087"} Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.197994 4634 scope.go:117] "RemoveContainer" containerID="66e5f7cddcf0d5a52ca4459df7c3e5983f76e1e654e4c50e5ebc51cf61af5126" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.205971 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vp95h" event={"ID":"4cfe29ba-9318-4725-bd8d-771a6f1360c0","Type":"ContainerDied","Data":"573a475d69387b1ddaf8c7aa7f26b387f24f406f59120108c46d982d68144816"} Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.206012 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="573a475d69387b1ddaf8c7aa7f26b387f24f406f59120108c46d982d68144816" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.206075 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vp95h" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.248626 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-fernet-keys\") pod \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.248711 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-config-data\") pod \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.248808 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p74dm\" (UniqueName: \"kubernetes.io/projected/4cfe29ba-9318-4725-bd8d-771a6f1360c0-kube-api-access-p74dm\") pod \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.248888 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-combined-ca-bundle\") pod \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.248929 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-scripts\") pod \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\" (UID: 
\"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.248996 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-credential-keys\") pod \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\" (UID: \"4cfe29ba-9318-4725-bd8d-771a6f1360c0\") " Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.269476 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4cfe29ba-9318-4725-bd8d-771a6f1360c0" (UID: "4cfe29ba-9318-4725-bd8d-771a6f1360c0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.290279 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4cfe29ba-9318-4725-bd8d-771a6f1360c0" (UID: "4cfe29ba-9318-4725-bd8d-771a6f1360c0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.296517 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cfe29ba-9318-4725-bd8d-771a6f1360c0" (UID: "4cfe29ba-9318-4725-bd8d-771a6f1360c0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.299550 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cfe29ba-9318-4725-bd8d-771a6f1360c0-kube-api-access-p74dm" (OuterVolumeSpecName: "kube-api-access-p74dm") pod "4cfe29ba-9318-4725-bd8d-771a6f1360c0" (UID: "4cfe29ba-9318-4725-bd8d-771a6f1360c0"). InnerVolumeSpecName "kube-api-access-p74dm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.309191 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-scripts" (OuterVolumeSpecName: "scripts") pod "4cfe29ba-9318-4725-bd8d-771a6f1360c0" (UID: "4cfe29ba-9318-4725-bd8d-771a6f1360c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.330832 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-config-data" (OuterVolumeSpecName: "config-data") pod "4cfe29ba-9318-4725-bd8d-771a6f1360c0" (UID: "4cfe29ba-9318-4725-bd8d-771a6f1360c0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.351361 4634 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.351404 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.351420 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p74dm\" (UniqueName: \"kubernetes.io/projected/4cfe29ba-9318-4725-bd8d-771a6f1360c0-kube-api-access-p74dm\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.351432 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.351442 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:45 crc kubenswrapper[4634]: I0929 14:03:45.351450 4634 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4cfe29ba-9318-4725-bd8d-771a6f1360c0-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.190300 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vp95h"] Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.198556 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vp95h"] Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.223906 4634 generic.go:334] "Generic (PLEG): container finished" podID="43cee9c9-8b49-4b42-a525-ccd9ab1a9730" containerID="0ab2e3ac4ec4ec2bb0e10bd7f8c433d0a587dc58fb1b8544e95a404a5a38cc04" exitCode=0 Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.224270 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-25zsb" event={"ID":"43cee9c9-8b49-4b42-a525-ccd9ab1a9730","Type":"ContainerDied","Data":"0ab2e3ac4ec4ec2bb0e10bd7f8c433d0a587dc58fb1b8544e95a404a5a38cc04"} Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.281013 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-rp2lh"] Sep 29 14:03:46 crc kubenswrapper[4634]: E0929 14:03:46.281582 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cfe29ba-9318-4725-bd8d-771a6f1360c0" containerName="keystone-bootstrap" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.281607 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cfe29ba-9318-4725-bd8d-771a6f1360c0" containerName="keystone-bootstrap" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.281781 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cfe29ba-9318-4725-bd8d-771a6f1360c0" containerName="keystone-bootstrap" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.282427 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.285443 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.285904 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-l544j" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.286140 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.286305 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.347249 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-rp2lh"] Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.371169 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-credential-keys\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.371276 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-scripts\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.371300 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-fernet-keys\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.371360 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-combined-ca-bundle\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.371392 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkxwv\" (UniqueName: \"kubernetes.io/projected/1dfb65bf-8466-4184-a169-755fbb7d65d9-kube-api-access-gkxwv\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.371410 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-config-data\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.472127 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkxwv\" (UniqueName: \"kubernetes.io/projected/1dfb65bf-8466-4184-a169-755fbb7d65d9-kube-api-access-gkxwv\") pod 
\"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.472203 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-config-data\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.472243 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-credential-keys\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.472294 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-scripts\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.472322 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-fernet-keys\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.472382 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-combined-ca-bundle\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.484725 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-combined-ca-bundle\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.485514 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-fernet-keys\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.485784 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-credential-keys\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.486355 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-config-data\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.489043 4634 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-scripts\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.489877 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkxwv\" (UniqueName: \"kubernetes.io/projected/1dfb65bf-8466-4184-a169-755fbb7d65d9-kube-api-access-gkxwv\") pod \"keystone-bootstrap-rp2lh\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:46 crc kubenswrapper[4634]: I0929 14:03:46.662997 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.175407 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.175562 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l6pvg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-577dn_openstack(1c1084d0-17b1-40a1-b57e-11e41ad8db3b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 
14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.176902 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-577dn" podUID="1c1084d0-17b1-40a1-b57e-11e41ad8db3b" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.199435 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.200030 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nd8hc5h548h557h67bhddhb9h59fh5c4h544h59dh8bh648h666h54h687h658h665h575h689hd6h88h686h5ddh5cdh577h5dbh64h64bh5c8h544h5cdq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d88hj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-69db6765d5-mgkh7_openstack(b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.202625 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-69db6765d5-mgkh7" podUID="b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.212961 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.213258 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f7h5f5h674hdbh647h59ch59chcch644h5fdh55h8h5ch84h5b4h5cdh5fch5bdh68dh645h5cfh5cch55ch599h55ch9dh86hcdh677h5cfh5b8h5fcq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mmk5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-59f849cb9-zxspc_openstack(f52e3024-e154-435d-892c-86a1b3344fb5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.223466 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-59f849cb9-zxspc" podUID="f52e3024-e154-435d-892c-86a1b3344fb5" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.241413 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66cbbc7c87-vxsv2" event={"ID":"d46c94b2-04bd-4cc3-a706-daa89146532f","Type":"ContainerDied","Data":"2af78e93c2e5078f93c5e9253c0d1d7c1bcd9f174c419204f83870dde132fe61"} Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.241465 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2af78e93c2e5078f93c5e9253c0d1d7c1bcd9f174c419204f83870dde132fe61" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.250066 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" 
event={"ID":"ab8abb49-8275-41ed-b2b9-1bff56c5790c","Type":"ContainerDied","Data":"855e968948ffc9ae92764ea7b7eee47e1e45fa7212157271700127154ba9211c"} Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.250174 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="855e968948ffc9ae92764ea7b7eee47e1e45fa7212157271700127154ba9211c" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.254798 4634 scope.go:117] "RemoveContainer" containerID="39f2151d1503f2eaf2292288944d56afd47463d45d010e3ce308f5eb4c644248" Sep 29 14:03:47 crc kubenswrapper[4634]: E0929 14:03:47.262769 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-577dn" podUID="1c1084d0-17b1-40a1-b57e-11e41ad8db3b" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.286751 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.347413 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398265 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46c94b2-04bd-4cc3-a706-daa89146532f-logs\") pod \"d46c94b2-04bd-4cc3-a706-daa89146532f\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398402 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-sb\") pod \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398430 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-config-data\") pod \"d46c94b2-04bd-4cc3-a706-daa89146532f\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398531 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-swift-storage-0\") pod \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398566 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-config\") pod \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398623 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-nb\") pod \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398663 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-scripts\") pod \"d46c94b2-04bd-4cc3-a706-daa89146532f\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398688 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fpmr\" (UniqueName: \"kubernetes.io/projected/ab8abb49-8275-41ed-b2b9-1bff56c5790c-kube-api-access-2fpmr\") pod \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398718 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46c94b2-04bd-4cc3-a706-daa89146532f-horizon-secret-key\") pod \"d46c94b2-04bd-4cc3-a706-daa89146532f\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398740 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97p75\" (UniqueName: \"kubernetes.io/projected/d46c94b2-04bd-4cc3-a706-daa89146532f-kube-api-access-97p75\") pod \"d46c94b2-04bd-4cc3-a706-daa89146532f\" (UID: \"d46c94b2-04bd-4cc3-a706-daa89146532f\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.398771 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-svc\") pod \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\" (UID: \"ab8abb49-8275-41ed-b2b9-1bff56c5790c\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.406322 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d46c94b2-04bd-4cc3-a706-daa89146532f-logs" (OuterVolumeSpecName: "logs") pod "d46c94b2-04bd-4cc3-a706-daa89146532f" (UID: "d46c94b2-04bd-4cc3-a706-daa89146532f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.408105 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-config-data" (OuterVolumeSpecName: "config-data") pod "d46c94b2-04bd-4cc3-a706-daa89146532f" (UID: "d46c94b2-04bd-4cc3-a706-daa89146532f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.409877 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-scripts" (OuterVolumeSpecName: "scripts") pod "d46c94b2-04bd-4cc3-a706-daa89146532f" (UID: "d46c94b2-04bd-4cc3-a706-daa89146532f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.415144 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab8abb49-8275-41ed-b2b9-1bff56c5790c-kube-api-access-2fpmr" (OuterVolumeSpecName: "kube-api-access-2fpmr") pod "ab8abb49-8275-41ed-b2b9-1bff56c5790c" (UID: "ab8abb49-8275-41ed-b2b9-1bff56c5790c"). InnerVolumeSpecName "kube-api-access-2fpmr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.415298 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d46c94b2-04bd-4cc3-a706-daa89146532f-kube-api-access-97p75" (OuterVolumeSpecName: "kube-api-access-97p75") pod "d46c94b2-04bd-4cc3-a706-daa89146532f" (UID: "d46c94b2-04bd-4cc3-a706-daa89146532f"). InnerVolumeSpecName "kube-api-access-97p75". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.426909 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d46c94b2-04bd-4cc3-a706-daa89146532f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d46c94b2-04bd-4cc3-a706-daa89146532f" (UID: "d46c94b2-04bd-4cc3-a706-daa89146532f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.497779 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-config" (OuterVolumeSpecName: "config") pod "ab8abb49-8275-41ed-b2b9-1bff56c5790c" (UID: "ab8abb49-8275-41ed-b2b9-1bff56c5790c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.507692 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab8abb49-8275-41ed-b2b9-1bff56c5790c" (UID: "ab8abb49-8275-41ed-b2b9-1bff56c5790c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515113 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515152 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515204 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d46c94b2-04bd-4cc3-a706-daa89146532f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515215 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fpmr\" (UniqueName: \"kubernetes.io/projected/ab8abb49-8275-41ed-b2b9-1bff56c5790c-kube-api-access-2fpmr\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515225 4634 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d46c94b2-04bd-4cc3-a706-daa89146532f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515234 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97p75\" (UniqueName: \"kubernetes.io/projected/d46c94b2-04bd-4cc3-a706-daa89146532f-kube-api-access-97p75\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515243 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.515251 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46c94b2-04bd-4cc3-a706-daa89146532f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.534856 4634 scope.go:117] "RemoveContainer" containerID="a485306e94400e856b14356c0bfafab100a818370852d22e5be9808d0c70f13c" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.681593 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab8abb49-8275-41ed-b2b9-1bff56c5790c" (UID: "ab8abb49-8275-41ed-b2b9-1bff56c5790c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.718869 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab8abb49-8275-41ed-b2b9-1bff56c5790c" (UID: "ab8abb49-8275-41ed-b2b9-1bff56c5790c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.727473 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.727510 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.781817 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ab8abb49-8275-41ed-b2b9-1bff56c5790c" (UID: "ab8abb49-8275-41ed-b2b9-1bff56c5790c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.831567 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab8abb49-8275-41ed-b2b9-1bff56c5790c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.838945 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.933121 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmk5t\" (UniqueName: \"kubernetes.io/projected/f52e3024-e154-435d-892c-86a1b3344fb5-kube-api-access-mmk5t\") pod \"f52e3024-e154-435d-892c-86a1b3344fb5\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.933235 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f52e3024-e154-435d-892c-86a1b3344fb5-horizon-secret-key\") pod \"f52e3024-e154-435d-892c-86a1b3344fb5\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.933281 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-scripts\") pod \"f52e3024-e154-435d-892c-86a1b3344fb5\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.933379 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-config-data\") pod \"f52e3024-e154-435d-892c-86a1b3344fb5\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.933434 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f52e3024-e154-435d-892c-86a1b3344fb5-logs\") pod \"f52e3024-e154-435d-892c-86a1b3344fb5\" (UID: \"f52e3024-e154-435d-892c-86a1b3344fb5\") " Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.938828 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f52e3024-e154-435d-892c-86a1b3344fb5-logs" (OuterVolumeSpecName: "logs") pod "f52e3024-e154-435d-892c-86a1b3344fb5" (UID: "f52e3024-e154-435d-892c-86a1b3344fb5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.939212 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.939643 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-scripts" (OuterVolumeSpecName: "scripts") pod "f52e3024-e154-435d-892c-86a1b3344fb5" (UID: "f52e3024-e154-435d-892c-86a1b3344fb5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.940004 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-config-data" (OuterVolumeSpecName: "config-data") pod "f52e3024-e154-435d-892c-86a1b3344fb5" (UID: "f52e3024-e154-435d-892c-86a1b3344fb5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.960481 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f52e3024-e154-435d-892c-86a1b3344fb5-kube-api-access-mmk5t" (OuterVolumeSpecName: "kube-api-access-mmk5t") pod "f52e3024-e154-435d-892c-86a1b3344fb5" (UID: "f52e3024-e154-435d-892c-86a1b3344fb5"). InnerVolumeSpecName "kube-api-access-mmk5t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:47 crc kubenswrapper[4634]: I0929 14:03:47.967667 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f52e3024-e154-435d-892c-86a1b3344fb5-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "f52e3024-e154-435d-892c-86a1b3344fb5" (UID: "f52e3024-e154-435d-892c-86a1b3344fb5"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.035582 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-config-data\") pod \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.035699 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d88hj\" (UniqueName: \"kubernetes.io/projected/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-kube-api-access-d88hj\") pod \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.035809 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-logs\") pod \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.035839 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-horizon-secret-key\") pod \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.035869 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-scripts\") pod \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\" (UID: \"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.036250 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-logs" (OuterVolumeSpecName: "logs") pod "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7" (UID: "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.037371 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-scripts" (OuterVolumeSpecName: "scripts") pod "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7" (UID: "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.037535 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-config-data" (OuterVolumeSpecName: "config-data") pod "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7" (UID: "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038682 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmk5t\" (UniqueName: \"kubernetes.io/projected/f52e3024-e154-435d-892c-86a1b3344fb5-kube-api-access-mmk5t\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038703 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038714 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038723 4634 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/f52e3024-e154-435d-892c-86a1b3344fb5-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038733 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038741 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f52e3024-e154-435d-892c-86a1b3344fb5-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038749 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.038757 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f52e3024-e154-435d-892c-86a1b3344fb5-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.041022 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7" (UID: "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.050928 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-kube-api-access-d88hj" (OuterVolumeSpecName: "kube-api-access-d88hj") pod "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7" (UID: "b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7"). InnerVolumeSpecName "kube-api-access-d88hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.112256 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-25zsb" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.143077 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-combined-ca-bundle\") pod \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.143695 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-config-data\") pod \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.143729 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drl7r\" (UniqueName: \"kubernetes.io/projected/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-kube-api-access-drl7r\") pod \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.144056 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-db-sync-config-data\") pod \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\" (UID: \"43cee9c9-8b49-4b42-a525-ccd9ab1a9730\") " Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.147324 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d88hj\" (UniqueName: \"kubernetes.io/projected/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-kube-api-access-d88hj\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.147351 4634 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.161173 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-kube-api-access-drl7r" (OuterVolumeSpecName: "kube-api-access-drl7r") pod "43cee9c9-8b49-4b42-a525-ccd9ab1a9730" (UID: "43cee9c9-8b49-4b42-a525-ccd9ab1a9730"). InnerVolumeSpecName "kube-api-access-drl7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.173656 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "43cee9c9-8b49-4b42-a525-ccd9ab1a9730" (UID: "43cee9c9-8b49-4b42-a525-ccd9ab1a9730"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.209266 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cfe29ba-9318-4725-bd8d-771a6f1360c0" path="/var/lib/kubelet/pods/4cfe29ba-9318-4725-bd8d-771a6f1360c0/volumes" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.247860 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43cee9c9-8b49-4b42-a525-ccd9ab1a9730" (UID: "43cee9c9-8b49-4b42-a525-ccd9ab1a9730"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.247964 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-dsvdn"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.250824 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.250887 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drl7r\" (UniqueName: \"kubernetes.io/projected/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-kube-api-access-drl7r\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.250901 4634 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.253503 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5d5866c49b-9tt6g"] Sep 29 14:03:48 crc kubenswrapper[4634]: W0929 14:03:48.282440 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod519a22f3_5513_430a_bd2b_6670ece06c2d.slice/crio-f0dc39cdab7b1e61438fdefeea0a30426f80ec92fadbf57bdf574d46ab53b609 WatchSource:0}: Error finding container f0dc39cdab7b1e61438fdefeea0a30426f80ec92fadbf57bdf574d46ab53b609: Status 404 returned error can't find the container with id f0dc39cdab7b1e61438fdefeea0a30426f80ec92fadbf57bdf574d46ab53b609 Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.293014 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-config-data" (OuterVolumeSpecName: "config-data") pod "43cee9c9-8b49-4b42-a525-ccd9ab1a9730" (UID: "43cee9c9-8b49-4b42-a525-ccd9ab1a9730"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.315127 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"c310be9cfa8ed67485f93d39340fd3b9cbd0be1e3fbae3bd53e3014ebdb22b63"} Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.315246 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-59f849cb9-zxspc" event={"ID":"f52e3024-e154-435d-892c-86a1b3344fb5","Type":"ContainerDied","Data":"a5201a48af67c032cd81b152f3307c7fdd125ab4629fe55b2e78b378ef42f9eb"} Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.315268 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-69db6765d5-mgkh7" event={"ID":"b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7","Type":"ContainerDied","Data":"10bf616c93e0256d325bb544b2341fc731a86fb4531dfca93151c811b91106da"} Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.328321 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-59f849cb9-zxspc" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.329364 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-25zsb" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.329776 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-69db6765d5-mgkh7" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.331711 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.331787 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-25zsb" event={"ID":"43cee9c9-8b49-4b42-a525-ccd9ab1a9730","Type":"ContainerDied","Data":"6894af1c5913d6b7c517560d0b9dcdfb1caebd34666453645570b482ac978b3b"} Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.344779 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6894af1c5913d6b7c517560d0b9dcdfb1caebd34666453645570b482ac978b3b" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.332157 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-66cbbc7c87-vxsv2" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.363654 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43cee9c9-8b49-4b42-a525-ccd9ab1a9730-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.499147 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-59f849cb9-zxspc"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.505920 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-59f849cb9-zxspc"] Sep 29 14:03:48 crc kubenswrapper[4634]: E0929 14:03:48.532862 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd46c94b2_04bd_4cc3_a706_daa89146532f.slice/crio-2af78e93c2e5078f93c5e9253c0d1d7c1bcd9f174c419204f83870dde132fe61\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf52e3024_e154_435d_892c_86a1b3344fb5.slice/crio-a5201a48af67c032cd81b152f3307c7fdd125ab4629fe55b2e78b378ef42f9eb\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1b4c6ea_2dbd_49b1_b375_f54ecdcfabf7.slice/crio-10bf616c93e0256d325bb544b2341fc731a86fb4531dfca93151c811b91106da\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1b4c6ea_2dbd_49b1_b375_f54ecdcfabf7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd46c94b2_04bd_4cc3_a706_daa89146532f.slice\": RecentStats: unable to find data in memory cache]" Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.608955 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7bbc59f76-j4rjv"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.656124 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6927h"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.670727 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-66cbbc7c87-vxsv2"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.684713 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-66cbbc7c87-vxsv2"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.751843 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-69db6765d5-mgkh7"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.758701 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-69db6765d5-mgkh7"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.783569 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-w88gx"] Sep 29 14:03:48 crc kubenswrapper[4634]: I0929 14:03:48.804536 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-w88gx"] Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.010703 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-bqjnz"] Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.043799 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-rp2lh"] Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.231152 4634 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-9d854"] Sep 29 14:03:49 crc kubenswrapper[4634]: E0929 14:03:49.232110 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="init" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.232126 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="init" Sep 29 14:03:49 crc kubenswrapper[4634]: E0929 14:03:49.232162 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43cee9c9-8b49-4b42-a525-ccd9ab1a9730" containerName="glance-db-sync" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.232169 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="43cee9c9-8b49-4b42-a525-ccd9ab1a9730" containerName="glance-db-sync" Sep 29 14:03:49 crc kubenswrapper[4634]: E0929 14:03:49.232182 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="dnsmasq-dns" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.232188 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="dnsmasq-dns" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.232390 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="dnsmasq-dns" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.232407 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="43cee9c9-8b49-4b42-a525-ccd9ab1a9730" containerName="glance-db-sync" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.233457 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.285244 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-w88gx" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.132:5353: i/o timeout" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.294066 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-9d854"] Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.415750 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bbc59f76-j4rjv" event={"ID":"1427174f-e673-4c3e-bf36-f1463327fd61","Type":"ContainerStarted","Data":"22b15cb39178373aaae412638951a5164f4062fbb550a96feaa3b70bdb0e1f20"} Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.416548 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.416614 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zvwp\" (UniqueName: \"kubernetes.io/projected/d3aecf87-94be-4fdc-81b4-24eddad58770-kube-api-access-7zvwp\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.416673 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-config\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.416701 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.416723 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.416760 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.424221 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6927h" event={"ID":"a5cbaa37-b66c-4549-9ccd-e9ba5771038a","Type":"ContainerStarted","Data":"84db524cd6190624d16007cedb8f95897645e879e3b2cf73dec0ecbc6c4be684"} Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.437358 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dsvdn" event={"ID":"519a22f3-5513-430a-bd2b-6670ece06c2d","Type":"ContainerStarted","Data":"f0dc39cdab7b1e61438fdefeea0a30426f80ec92fadbf57bdf574d46ab53b609"} Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.521558 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-config\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.521607 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.521637 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.521691 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-swift-storage-0\") pod 
\"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.521790 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.521823 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zvwp\" (UniqueName: \"kubernetes.io/projected/d3aecf87-94be-4fdc-81b4-24eddad58770-kube-api-access-7zvwp\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.528455 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-config\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.529214 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.529266 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.530029 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.530232 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.535360 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d5866c49b-9tt6g" event={"ID":"24cc4bfc-123a-479d-afb7-ca6b62cd7754","Type":"ContainerStarted","Data":"ac8bff9ac65675dc2cad1b9cf611dfe13b0540b132aacc616963a1e8cfa2f745"} Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.555134 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bqjnz" event={"ID":"aac1358d-d39f-4732-97fa-cc8947c81bdb","Type":"ContainerStarted","Data":"0b85cd83b23fbc9bee435043dc5c4ebd744063a3d77b487b4a238d22124b428b"} Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.612227 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-rp2lh" event={"ID":"1dfb65bf-8466-4184-a169-755fbb7d65d9","Type":"ContainerStarted","Data":"3eeb4582d8254dc8a3aceaa0fa85db2e3d36255a4018214e0f020823b283eef0"} Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.743130 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zvwp\" (UniqueName: \"kubernetes.io/projected/d3aecf87-94be-4fdc-81b4-24eddad58770-kube-api-access-7zvwp\") pod \"dnsmasq-dns-8b5c85b87-9d854\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:49 crc kubenswrapper[4634]: I0929 14:03:49.887754 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.134139 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab8abb49-8275-41ed-b2b9-1bff56c5790c" path="/var/lib/kubelet/pods/ab8abb49-8275-41ed-b2b9-1bff56c5790c/volumes" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.136683 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7" path="/var/lib/kubelet/pods/b1b4c6ea-2dbd-49b1-b375-f54ecdcfabf7/volumes" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.137432 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d46c94b2-04bd-4cc3-a706-daa89146532f" path="/var/lib/kubelet/pods/d46c94b2-04bd-4cc3-a706-daa89146532f/volumes" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.137884 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f52e3024-e154-435d-892c-86a1b3344fb5" path="/var/lib/kubelet/pods/f52e3024-e154-435d-892c-86a1b3344fb5/volumes" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.431168 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.433252 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.437557 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mdc99" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.437754 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.437870 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.441834 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.575469 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-config-data\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.576002 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.576057 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.576121 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.576150 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-scripts\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.576182 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-logs\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.576226 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctbzd\" (UniqueName: \"kubernetes.io/projected/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-kube-api-access-ctbzd\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " 
pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.581970 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-9d854"] Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.627480 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rp2lh" event={"ID":"1dfb65bf-8466-4184-a169-755fbb7d65d9","Type":"ContainerStarted","Data":"933b8bbb36dfb007f66544045aa0662ae838762226822174e515759f482fd247"} Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.634887 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bbc59f76-j4rjv" event={"ID":"1427174f-e673-4c3e-bf36-f1463327fd61","Type":"ContainerStarted","Data":"7dbc773a851935823ae057d5f45287b8cb997b84d3d50b9ecf58704e2fd62dd0"} Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.654922 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d5866c49b-9tt6g" event={"ID":"24cc4bfc-123a-479d-afb7-ca6b62cd7754","Type":"ContainerStarted","Data":"331261af0bd48ecd426471f9965c972e0aaf8b0fc4c5817c8e7524210c6ade9f"} Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.658706 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.662940 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bqjnz" event={"ID":"aac1358d-d39f-4732-97fa-cc8947c81bdb","Type":"ContainerStarted","Data":"bb5673135f867de6d57893626f372f653b907692d0d78c44e581635e7e400137"} Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.664375 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.669758 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.678213 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.678266 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-scripts\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.678296 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-logs\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.678341 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctbzd\" (UniqueName: \"kubernetes.io/projected/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-kube-api-access-ctbzd\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 
14:03:50.678398 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-config-data\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.678429 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.678469 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.678820 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.684034 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-logs\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.684348 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.703155 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.706981 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-config-data\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.709040 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.727794 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-scripts\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc 
kubenswrapper[4634]: I0929 14:03:50.734656 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-rp2lh" podStartSLOduration=4.734639283 podStartE2EDuration="4.734639283s" podCreationTimestamp="2025-09-29 14:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:50.672771363 +0000 UTC m=+1161.241499112" watchObservedRunningTime="2025-09-29 14:03:50.734639283 +0000 UTC m=+1161.303367032" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.742938 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctbzd\" (UniqueName: \"kubernetes.io/projected/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-kube-api-access-ctbzd\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.746568 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.779312 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.781607 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.781673 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.781701 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmpq6\" (UniqueName: \"kubernetes.io/projected/e98db531-bbba-41b4-97df-f94db814d3f9-kube-api-access-fmpq6\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.781735 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-logs\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.781765 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 
14:03:50.781785 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.781804 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.784925 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-bqjnz" podStartSLOduration=16.784914696 podStartE2EDuration="16.784914696s" podCreationTimestamp="2025-09-29 14:03:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:50.767032737 +0000 UTC m=+1161.335760506" watchObservedRunningTime="2025-09-29 14:03:50.784914696 +0000 UTC m=+1161.353642445" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.883841 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.884589 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.884628 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmpq6\" (UniqueName: \"kubernetes.io/projected/e98db531-bbba-41b4-97df-f94db814d3f9-kube-api-access-fmpq6\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.884650 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-logs\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.884673 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.884696 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.884715 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.888288 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.888521 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-logs\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.888612 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.896843 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.909149 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.917809 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmpq6\" (UniqueName: \"kubernetes.io/projected/e98db531-bbba-41b4-97df-f94db814d3f9-kube-api-access-fmpq6\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.923482 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:50 crc kubenswrapper[4634]: I0929 14:03:50.952522 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:03:51 crc kubenswrapper[4634]: I0929 14:03:51.142356 4634 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:03:51 crc kubenswrapper[4634]: W0929 14:03:51.464163 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3aecf87_94be_4fdc_81b4_24eddad58770.slice/crio-cdbec4c773addfee57748bc7b63285bbe9daffb43a5376b7d4630337e976ab5f WatchSource:0}: Error finding container cdbec4c773addfee57748bc7b63285bbe9daffb43a5376b7d4630337e976ab5f: Status 404 returned error can't find the container with id cdbec4c773addfee57748bc7b63285bbe9daffb43a5376b7d4630337e976ab5f Sep 29 14:03:51 crc kubenswrapper[4634]: I0929 14:03:51.692255 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" event={"ID":"d3aecf87-94be-4fdc-81b4-24eddad58770","Type":"ContainerStarted","Data":"cdbec4c773addfee57748bc7b63285bbe9daffb43a5376b7d4630337e976ab5f"} Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.365180 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:03:52 crc kubenswrapper[4634]: W0929 14:03:52.379186 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb547ae5d_71f9_43ee_be14_a9cd2f1c1de7.slice/crio-904f489d8698da1ad79e650ea2b546ffafbc8334bca04cb18e46de95e3af1e34 WatchSource:0}: Error finding container 904f489d8698da1ad79e650ea2b546ffafbc8334bca04cb18e46de95e3af1e34: Status 404 returned error can't find the container with id 904f489d8698da1ad79e650ea2b546ffafbc8334bca04cb18e46de95e3af1e34 Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.557772 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.750177 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.754595 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"061c94cb-cc6c-4a14-a0c4-4bcef38173b7","Type":"ContainerStarted","Data":"0048fa50e8fa68718e3de30d8b2da6272a12b039e412b574a35f901fb82a98a5"} Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.771188 4634 generic.go:334] "Generic (PLEG): container finished" podID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerID="40b3532afabbf65d06826e9b75c7a0d7e9b8b15da2d8a92cc18440c6f5cb67d2" exitCode=0 Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.771245 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" event={"ID":"d3aecf87-94be-4fdc-81b4-24eddad58770","Type":"ContainerDied","Data":"40b3532afabbf65d06826e9b75c7a0d7e9b8b15da2d8a92cc18440c6f5cb67d2"} Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.781099 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98db531-bbba-41b4-97df-f94db814d3f9","Type":"ContainerStarted","Data":"4a393f103e09702866f724fea70b1c104221320b21efead6eface691778a95b0"} Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.790264 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7","Type":"ContainerStarted","Data":"904f489d8698da1ad79e650ea2b546ffafbc8334bca04cb18e46de95e3af1e34"} Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.860139 
4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bbc59f76-j4rjv" event={"ID":"1427174f-e673-4c3e-bf36-f1463327fd61","Type":"ContainerStarted","Data":"fd599d2a47dc10f2251e1e9bfc707c55aaa8162d18f2e03f6679c324016f8548"} Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.874173 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5d5866c49b-9tt6g" event={"ID":"24cc4bfc-123a-479d-afb7-ca6b62cd7754","Type":"ContainerStarted","Data":"876a7a494c6b676d3091d6d7e60d9c32c455004b299c7f508da9519a9d2af3cb"} Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.886601 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.920973 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7bbc59f76-j4rjv" podStartSLOduration=21.152327202 podStartE2EDuration="21.92046628s" podCreationTimestamp="2025-09-29 14:03:31 +0000 UTC" firstStartedPulling="2025-09-29 14:03:48.611332133 +0000 UTC m=+1159.180059882" lastFinishedPulling="2025-09-29 14:03:49.379471211 +0000 UTC m=+1159.948198960" observedRunningTime="2025-09-29 14:03:52.901478941 +0000 UTC m=+1163.470206690" watchObservedRunningTime="2025-09-29 14:03:52.92046628 +0000 UTC m=+1163.489194039" Sep 29 14:03:52 crc kubenswrapper[4634]: I0929 14:03:52.940686 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-5d5866c49b-9tt6g" podStartSLOduration=21.323256169 podStartE2EDuration="21.940665751s" podCreationTimestamp="2025-09-29 14:03:31 +0000 UTC" firstStartedPulling="2025-09-29 14:03:48.307711921 +0000 UTC m=+1158.876439670" lastFinishedPulling="2025-09-29 14:03:48.925121503 +0000 UTC m=+1159.493849252" observedRunningTime="2025-09-29 14:03:52.930688359 +0000 UTC m=+1163.499416108" watchObservedRunningTime="2025-09-29 14:03:52.940665751 +0000 UTC m=+1163.509393500" Sep 29 14:03:53 crc kubenswrapper[4634]: I0929 14:03:53.894984 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" event={"ID":"d3aecf87-94be-4fdc-81b4-24eddad58770","Type":"ContainerStarted","Data":"5d4e8c01cdc480f7b204c8eb09455d4b04756506570d3e1bde68d5d1aab9c079"} Sep 29 14:03:53 crc kubenswrapper[4634]: I0929 14:03:53.896482 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:54 crc kubenswrapper[4634]: I0929 14:03:54.915938 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98db531-bbba-41b4-97df-f94db814d3f9","Type":"ContainerStarted","Data":"be8d8ca5bef9e9695e44efaa6d90f6af3964db0b69a9a85f630950ccdc37ba2a"} Sep 29 14:03:54 crc kubenswrapper[4634]: I0929 14:03:54.922680 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7","Type":"ContainerStarted","Data":"5fb50a50054d196899776672c765e4a9a52dde31bcb09b265408ee57556d1918"} Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.946624 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98db531-bbba-41b4-97df-f94db814d3f9","Type":"ContainerStarted","Data":"ddabd6bed2321f27da0b21c9efd68867963baf78d7a6ad82302571efc27db04a"} Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.946829 4634 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-internal-api-0" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-httpd" containerID="cri-o://ddabd6bed2321f27da0b21c9efd68867963baf78d7a6ad82302571efc27db04a" gracePeriod=30 Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.946792 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-log" containerID="cri-o://be8d8ca5bef9e9695e44efaa6d90f6af3964db0b69a9a85f630950ccdc37ba2a" gracePeriod=30 Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.952663 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7","Type":"ContainerStarted","Data":"6ead59276f1cebbcaa6da9b702c719228dde42778e42db8d830880c40b196f03"} Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.952808 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-log" containerID="cri-o://5fb50a50054d196899776672c765e4a9a52dde31bcb09b265408ee57556d1918" gracePeriod=30 Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.952932 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-httpd" containerID="cri-o://6ead59276f1cebbcaa6da9b702c719228dde42778e42db8d830880c40b196f03" gracePeriod=30 Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.977769 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" podStartSLOduration=7.977743407 podStartE2EDuration="7.977743407s" podCreationTimestamp="2025-09-29 14:03:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:53.920757929 +0000 UTC m=+1164.489485678" watchObservedRunningTime="2025-09-29 14:03:56.977743407 +0000 UTC m=+1167.546471156" Sep 29 14:03:56 crc kubenswrapper[4634]: I0929 14:03:56.979274 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.97926829 podStartE2EDuration="7.97926829s" podCreationTimestamp="2025-09-29 14:03:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:56.967325065 +0000 UTC m=+1167.536052814" watchObservedRunningTime="2025-09-29 14:03:56.97926829 +0000 UTC m=+1167.547996039" Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.015188 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.015164099 podStartE2EDuration="8.015164099s" podCreationTimestamp="2025-09-29 14:03:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:03:57.009667436 +0000 UTC m=+1167.578395175" watchObservedRunningTime="2025-09-29 14:03:57.015164099 +0000 UTC m=+1167.583891838" Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.966492 4634 generic.go:334] "Generic (PLEG): container finished" podID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" 
containerID="6ead59276f1cebbcaa6da9b702c719228dde42778e42db8d830880c40b196f03" exitCode=0 Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.966858 4634 generic.go:334] "Generic (PLEG): container finished" podID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerID="5fb50a50054d196899776672c765e4a9a52dde31bcb09b265408ee57556d1918" exitCode=143 Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.966570 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7","Type":"ContainerDied","Data":"6ead59276f1cebbcaa6da9b702c719228dde42778e42db8d830880c40b196f03"} Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.966943 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7","Type":"ContainerDied","Data":"5fb50a50054d196899776672c765e4a9a52dde31bcb09b265408ee57556d1918"} Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.970464 4634 generic.go:334] "Generic (PLEG): container finished" podID="e98db531-bbba-41b4-97df-f94db814d3f9" containerID="ddabd6bed2321f27da0b21c9efd68867963baf78d7a6ad82302571efc27db04a" exitCode=0 Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.970505 4634 generic.go:334] "Generic (PLEG): container finished" podID="e98db531-bbba-41b4-97df-f94db814d3f9" containerID="be8d8ca5bef9e9695e44efaa6d90f6af3964db0b69a9a85f630950ccdc37ba2a" exitCode=143 Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.970535 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98db531-bbba-41b4-97df-f94db814d3f9","Type":"ContainerDied","Data":"ddabd6bed2321f27da0b21c9efd68867963baf78d7a6ad82302571efc27db04a"} Sep 29 14:03:57 crc kubenswrapper[4634]: I0929 14:03:57.970574 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98db531-bbba-41b4-97df-f94db814d3f9","Type":"ContainerDied","Data":"be8d8ca5bef9e9695e44efaa6d90f6af3964db0b69a9a85f630950ccdc37ba2a"} Sep 29 14:03:59 crc kubenswrapper[4634]: I0929 14:03:59.891422 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:03:59 crc kubenswrapper[4634]: I0929 14:03:59.980606 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-zjh78"] Sep 29 14:03:59 crc kubenswrapper[4634]: I0929 14:03:59.981208 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="dnsmasq-dns" containerID="cri-o://b0e50e76221f9aeae9ee0a28d05474748ce117d08c0f703a288f3e0f46c63610" gracePeriod=10 Sep 29 14:04:00 crc kubenswrapper[4634]: I0929 14:04:00.043859 4634 generic.go:334] "Generic (PLEG): container finished" podID="1dfb65bf-8466-4184-a169-755fbb7d65d9" containerID="933b8bbb36dfb007f66544045aa0662ae838762226822174e515759f482fd247" exitCode=0 Sep 29 14:04:00 crc kubenswrapper[4634]: I0929 14:04:00.043913 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rp2lh" event={"ID":"1dfb65bf-8466-4184-a169-755fbb7d65d9","Type":"ContainerDied","Data":"933b8bbb36dfb007f66544045aa0662ae838762226822174e515759f482fd247"} Sep 29 14:04:01 crc kubenswrapper[4634]: I0929 14:04:01.106809 4634 generic.go:334] "Generic (PLEG): container finished" podID="c74c2608-d914-4498-a016-603a32c1fd5c" 
containerID="b0e50e76221f9aeae9ee0a28d05474748ce117d08c0f703a288f3e0f46c63610" exitCode=0 Sep 29 14:04:01 crc kubenswrapper[4634]: I0929 14:04:01.107060 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" event={"ID":"c74c2608-d914-4498-a016-603a32c1fd5c","Type":"ContainerDied","Data":"b0e50e76221f9aeae9ee0a28d05474748ce117d08c0f703a288f3e0f46c63610"} Sep 29 14:04:01 crc kubenswrapper[4634]: I0929 14:04:01.595853 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:04:01 crc kubenswrapper[4634]: I0929 14:04:01.596425 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:04:01 crc kubenswrapper[4634]: I0929 14:04:01.726209 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:04:01 crc kubenswrapper[4634]: I0929 14:04:01.727441 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:04:08 crc kubenswrapper[4634]: I0929 14:04:08.387313 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: i/o timeout" Sep 29 14:04:10 crc kubenswrapper[4634]: E0929 14:04:10.303050 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/sg-core:latest" Sep 29 14:04:10 crc kubenswrapper[4634]: E0929 14:04:10.304337 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/openstack-k8s-operators/sg-core:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:sg-core-conf-yaml,ReadOnly:false,MountPath:/etc/sg-core.conf.yaml,SubPath:sg-core.conf.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-csfmr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(061c94cb-cc6c-4a14-a0c4-4bcef38173b7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.788181 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.807288 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.853689 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-scripts\") pod \"1dfb65bf-8466-4184-a169-755fbb7d65d9\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.853865 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-nb\") pod \"c74c2608-d914-4498-a016-603a32c1fd5c\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.853899 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkxwv\" (UniqueName: \"kubernetes.io/projected/1dfb65bf-8466-4184-a169-755fbb7d65d9-kube-api-access-gkxwv\") pod \"1dfb65bf-8466-4184-a169-755fbb7d65d9\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.853947 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mflb5\" (UniqueName: \"kubernetes.io/projected/c74c2608-d914-4498-a016-603a32c1fd5c-kube-api-access-mflb5\") pod \"c74c2608-d914-4498-a016-603a32c1fd5c\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.854052 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-config-data\") pod \"1dfb65bf-8466-4184-a169-755fbb7d65d9\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.855134 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-combined-ca-bundle\") pod \"1dfb65bf-8466-4184-a169-755fbb7d65d9\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.855176 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-svc\") pod \"c74c2608-d914-4498-a016-603a32c1fd5c\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.855229 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-config\") pod \"c74c2608-d914-4498-a016-603a32c1fd5c\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.855289 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-swift-storage-0\") pod \"c74c2608-d914-4498-a016-603a32c1fd5c\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.855338 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-credential-keys\") pod \"1dfb65bf-8466-4184-a169-755fbb7d65d9\" (UID: 
\"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.857365 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-fernet-keys\") pod \"1dfb65bf-8466-4184-a169-755fbb7d65d9\" (UID: \"1dfb65bf-8466-4184-a169-755fbb7d65d9\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.857411 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-sb\") pod \"c74c2608-d914-4498-a016-603a32c1fd5c\" (UID: \"c74c2608-d914-4498-a016-603a32c1fd5c\") " Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.867722 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dfb65bf-8466-4184-a169-755fbb7d65d9-kube-api-access-gkxwv" (OuterVolumeSpecName: "kube-api-access-gkxwv") pod "1dfb65bf-8466-4184-a169-755fbb7d65d9" (UID: "1dfb65bf-8466-4184-a169-755fbb7d65d9"). InnerVolumeSpecName "kube-api-access-gkxwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.874731 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkxwv\" (UniqueName: \"kubernetes.io/projected/1dfb65bf-8466-4184-a169-755fbb7d65d9-kube-api-access-gkxwv\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.877550 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c74c2608-d914-4498-a016-603a32c1fd5c-kube-api-access-mflb5" (OuterVolumeSpecName: "kube-api-access-mflb5") pod "c74c2608-d914-4498-a016-603a32c1fd5c" (UID: "c74c2608-d914-4498-a016-603a32c1fd5c"). InnerVolumeSpecName "kube-api-access-mflb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.885103 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-scripts" (OuterVolumeSpecName: "scripts") pod "1dfb65bf-8466-4184-a169-755fbb7d65d9" (UID: "1dfb65bf-8466-4184-a169-755fbb7d65d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.891725 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1dfb65bf-8466-4184-a169-755fbb7d65d9" (UID: "1dfb65bf-8466-4184-a169-755fbb7d65d9"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.906249 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1dfb65bf-8466-4184-a169-755fbb7d65d9" (UID: "1dfb65bf-8466-4184-a169-755fbb7d65d9"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.964821 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1dfb65bf-8466-4184-a169-755fbb7d65d9" (UID: "1dfb65bf-8466-4184-a169-755fbb7d65d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.978532 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mflb5\" (UniqueName: \"kubernetes.io/projected/c74c2608-d914-4498-a016-603a32c1fd5c-kube-api-access-mflb5\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.978565 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.978607 4634 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.978621 4634 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.978631 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.984690 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-config-data" (OuterVolumeSpecName: "config-data") pod "1dfb65bf-8466-4184-a169-755fbb7d65d9" (UID: "1dfb65bf-8466-4184-a169-755fbb7d65d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:10 crc kubenswrapper[4634]: I0929 14:04:10.991312 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c74c2608-d914-4498-a016-603a32c1fd5c" (UID: "c74c2608-d914-4498-a016-603a32c1fd5c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:10.996733 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c74c2608-d914-4498-a016-603a32c1fd5c" (UID: "c74c2608-d914-4498-a016-603a32c1fd5c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.005493 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c74c2608-d914-4498-a016-603a32c1fd5c" (UID: "c74c2608-d914-4498-a016-603a32c1fd5c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.015687 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c74c2608-d914-4498-a016-603a32c1fd5c" (UID: "c74c2608-d914-4498-a016-603a32c1fd5c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.018625 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-config" (OuterVolumeSpecName: "config") pod "c74c2608-d914-4498-a016-603a32c1fd5c" (UID: "c74c2608-d914-4498-a016-603a32c1fd5c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.080572 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1dfb65bf-8466-4184-a169-755fbb7d65d9-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.080989 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.080999 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.081009 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.081020 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.081040 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c74c2608-d914-4498-a016-603a32c1fd5c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.265723 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" event={"ID":"c74c2608-d914-4498-a016-603a32c1fd5c","Type":"ContainerDied","Data":"48ecd851a5c185e9692cebc382112fe286b137775a78b6dd17347155956e3bb4"} Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.265778 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.265831 4634 scope.go:117] "RemoveContainer" containerID="b0e50e76221f9aeae9ee0a28d05474748ce117d08c0f703a288f3e0f46c63610" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.270154 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rp2lh" event={"ID":"1dfb65bf-8466-4184-a169-755fbb7d65d9","Type":"ContainerDied","Data":"3eeb4582d8254dc8a3aceaa0fa85db2e3d36255a4018214e0f020823b283eef0"} Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.270205 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3eeb4582d8254dc8a3aceaa0fa85db2e3d36255a4018214e0f020823b283eef0" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.270279 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rp2lh" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.314389 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-zjh78"] Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.321377 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76fcf4b695-zjh78"] Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.598718 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.729067 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d5866c49b-9tt6g" podUID="24cc4bfc-123a-479d-afb7-ca6b62cd7754" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.934425 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-587bf8586b-wjkjk"] Sep 29 14:04:11 crc kubenswrapper[4634]: E0929 14:04:11.934981 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="dnsmasq-dns" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.934995 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="dnsmasq-dns" Sep 29 14:04:11 crc kubenswrapper[4634]: E0929 14:04:11.935053 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="init" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.935061 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="init" Sep 29 14:04:11 crc kubenswrapper[4634]: E0929 14:04:11.935070 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dfb65bf-8466-4184-a169-755fbb7d65d9" containerName="keystone-bootstrap" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.935103 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dfb65bf-8466-4184-a169-755fbb7d65d9" containerName="keystone-bootstrap" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.935419 4634 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1dfb65bf-8466-4184-a169-755fbb7d65d9" containerName="keystone-bootstrap" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.935441 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="dnsmasq-dns" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.936661 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.955795 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.956061 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.956165 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.956273 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.956426 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-l544j" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.956542 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 14:04:11 crc kubenswrapper[4634]: I0929 14:04:11.971295 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-587bf8586b-wjkjk"] Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004607 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-public-tls-certs\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004664 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-combined-ca-bundle\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004708 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-internal-tls-certs\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004821 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-scripts\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004846 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qpmc\" (UniqueName: \"kubernetes.io/projected/f725a01f-c382-4260-8e4e-e530d7c0ed82-kube-api-access-7qpmc\") pod \"keystone-587bf8586b-wjkjk\" (UID: 
\"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004865 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-credential-keys\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004884 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-fernet-keys\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.004919 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-config-data\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.107259 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-internal-tls-certs\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.107828 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-scripts\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.107855 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qpmc\" (UniqueName: \"kubernetes.io/projected/f725a01f-c382-4260-8e4e-e530d7c0ed82-kube-api-access-7qpmc\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.108837 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-credential-keys\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.109435 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-fernet-keys\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.109478 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-config-data\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" 
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.109552 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-public-tls-certs\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.109586 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-combined-ca-bundle\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.117978 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-credential-keys\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.118575 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-config-data\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.123685 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-scripts\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.124577 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-public-tls-certs\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.128354 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-combined-ca-bundle\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.142540 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qpmc\" (UniqueName: \"kubernetes.io/projected/f725a01f-c382-4260-8e4e-e530d7c0ed82-kube-api-access-7qpmc\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.142838 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-fernet-keys\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.142927 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-internal-tls-certs\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk"
\"kubernetes.io/secret/f725a01f-c382-4260-8e4e-e530d7c0ed82-internal-tls-certs\") pod \"keystone-587bf8586b-wjkjk\" (UID: \"f725a01f-c382-4260-8e4e-e530d7c0ed82\") " pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.156728 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" path="/var/lib/kubelet/pods/c74c2608-d914-4498-a016-603a32c1fd5c/volumes" Sep 29 14:04:12 crc kubenswrapper[4634]: I0929 14:04:12.309953 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:13 crc kubenswrapper[4634]: I0929 14:04:13.389236 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76fcf4b695-zjh78" podUID="c74c2608-d914-4498-a016-603a32c1fd5c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: i/o timeout" Sep 29 14:04:20 crc kubenswrapper[4634]: I0929 14:04:20.779991 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 14:04:20 crc kubenswrapper[4634]: I0929 14:04:20.780691 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 14:04:21 crc kubenswrapper[4634]: I0929 14:04:21.142557 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:21 crc kubenswrapper[4634]: I0929 14:04:21.142612 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:21 crc kubenswrapper[4634]: I0929 14:04:21.596541 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 14:04:21 crc kubenswrapper[4634]: I0929 14:04:21.726950 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-5d5866c49b-9tt6g" podUID="24cc4bfc-123a-479d-afb7-ca6b62cd7754" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 14:04:25 crc kubenswrapper[4634]: I0929 14:04:25.956354 4634 scope.go:117] "RemoveContainer" containerID="bc362c12268e249114c4ab48be51147c1fa9af11fc2cd60a591c3ec307543480" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.038869 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.046385 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239129 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-scripts\") pod \"e98db531-bbba-41b4-97df-f94db814d3f9\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239209 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-scripts\") pod \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239230 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"e98db531-bbba-41b4-97df-f94db814d3f9\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239273 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmpq6\" (UniqueName: \"kubernetes.io/projected/e98db531-bbba-41b4-97df-f94db814d3f9-kube-api-access-fmpq6\") pod \"e98db531-bbba-41b4-97df-f94db814d3f9\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239308 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-combined-ca-bundle\") pod \"e98db531-bbba-41b4-97df-f94db814d3f9\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239344 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-combined-ca-bundle\") pod \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239368 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctbzd\" (UniqueName: \"kubernetes.io/projected/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-kube-api-access-ctbzd\") pod \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239390 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-httpd-run\") pod \"e98db531-bbba-41b4-97df-f94db814d3f9\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239437 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-config-data\") pod \"e98db531-bbba-41b4-97df-f94db814d3f9\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239490 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") " Sep 29 14:04:26 crc 
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239564 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-logs\") pod \"e98db531-bbba-41b4-97df-f94db814d3f9\" (UID: \"e98db531-bbba-41b4-97df-f94db814d3f9\") "
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239597 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-httpd-run\") pod \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") "
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.239658 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-logs\") pod \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\" (UID: \"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7\") "
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.241917 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e98db531-bbba-41b4-97df-f94db814d3f9" (UID: "e98db531-bbba-41b4-97df-f94db814d3f9"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.246717 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-scripts" (OuterVolumeSpecName: "scripts") pod "e98db531-bbba-41b4-97df-f94db814d3f9" (UID: "e98db531-bbba-41b4-97df-f94db814d3f9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.247763 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-logs" (OuterVolumeSpecName: "logs") pod "e98db531-bbba-41b4-97df-f94db814d3f9" (UID: "e98db531-bbba-41b4-97df-f94db814d3f9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.248123 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "e98db531-bbba-41b4-97df-f94db814d3f9" (UID: "e98db531-bbba-41b4-97df-f94db814d3f9"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.248288 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e98db531-bbba-41b4-97df-f94db814d3f9-kube-api-access-fmpq6" (OuterVolumeSpecName: "kube-api-access-fmpq6") pod "e98db531-bbba-41b4-97df-f94db814d3f9" (UID: "e98db531-bbba-41b4-97df-f94db814d3f9"). InnerVolumeSpecName "kube-api-access-fmpq6". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.249908 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" (UID: "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.250361 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-logs" (OuterVolumeSpecName: "logs") pod "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" (UID: "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.259323 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-scripts" (OuterVolumeSpecName: "scripts") pod "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" (UID: "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.259336 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" (UID: "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.292019 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e98db531-bbba-41b4-97df-f94db814d3f9" (UID: "e98db531-bbba-41b4-97df-f94db814d3f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.292161 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-kube-api-access-ctbzd" (OuterVolumeSpecName: "kube-api-access-ctbzd") pod "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" (UID: "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7"). InnerVolumeSpecName "kube-api-access-ctbzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.292225 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" (UID: "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.333171 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-config-data" (OuterVolumeSpecName: "config-data") pod "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" (UID: "b547ae5d-71f9-43ee-be14-a9cd2f1c1de7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342538 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342597 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342645 4634 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342663 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmpq6\" (UniqueName: \"kubernetes.io/projected/e98db531-bbba-41b4-97df-f94db814d3f9-kube-api-access-fmpq6\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342682 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342695 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342704 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctbzd\" (UniqueName: \"kubernetes.io/projected/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-kube-api-access-ctbzd\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342715 4634 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342733 4634 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342743 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342756 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e98db531-bbba-41b4-97df-f94db814d3f9-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342766 4634 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.342780 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.352279 4634 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-config-data" (OuterVolumeSpecName: "config-data") pod "e98db531-bbba-41b4-97df-f94db814d3f9" (UID: "e98db531-bbba-41b4-97df-f94db814d3f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.367770 4634 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.374672 4634 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.445386 4634 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.445422 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e98db531-bbba-41b4-97df-f94db814d3f9-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.445435 4634 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.470951 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e98db531-bbba-41b4-97df-f94db814d3f9","Type":"ContainerDied","Data":"4a393f103e09702866f724fea70b1c104221320b21efead6eface691778a95b0"} Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.470980 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.479195 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b547ae5d-71f9-43ee-be14-a9cd2f1c1de7","Type":"ContainerDied","Data":"904f489d8698da1ad79e650ea2b546ffafbc8334bca04cb18e46de95e3af1e34"} Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.479283 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.516329 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.524689 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.551274 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.579155 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.592769 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: E0929 14:04:26.593286 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-httpd" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593301 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-httpd" Sep 29 14:04:26 crc kubenswrapper[4634]: E0929 14:04:26.593311 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-log" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593317 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-log" Sep 29 14:04:26 crc kubenswrapper[4634]: E0929 14:04:26.593328 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-httpd" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593335 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-httpd" Sep 29 14:04:26 crc kubenswrapper[4634]: E0929 14:04:26.593349 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-log" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593354 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-log" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593560 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-log" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593574 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" containerName="glance-httpd" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593586 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-log" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.593596 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" containerName="glance-httpd" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.594622 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.600786 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-mdc99" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.601193 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.601314 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.601455 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.618616 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.654010 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.659403 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.671430 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.676314 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.691792 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.767792 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.767843 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.767881 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.767911 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.768640 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.768731 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769027 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-logs\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769065 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769137 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769162 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769198 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769283 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-logs\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769302 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v4mk\" (UniqueName: \"kubernetes.io/projected/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-kube-api-access-2v4mk\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769324 4634 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769344 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.769389 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9lnt\" (UniqueName: \"kubernetes.io/projected/ef284948-85a5-432e-b6a6-a57f988f02fc-kube-api-access-t9lnt\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873525 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873578 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-logs\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873597 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873621 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873642 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873677 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873740 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-logs\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873761 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v4mk\" (UniqueName: \"kubernetes.io/projected/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-kube-api-access-2v4mk\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873783 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873835 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873894 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9lnt\" (UniqueName: \"kubernetes.io/projected/ef284948-85a5-432e-b6a6-a57f988f02fc-kube-api-access-t9lnt\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873920 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873941 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873963 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.873980 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.874004 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.874848 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.875049 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.875545 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.880399 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.880727 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-logs\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.880834 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.886759 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-logs\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.891837 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.891935 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.893031 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v4mk\" (UniqueName: \"kubernetes.io/projected/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-kube-api-access-2v4mk\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.894927 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.895870 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-scripts\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.896123 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.896418 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-config-data\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.900121 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9lnt\" (UniqueName: \"kubernetes.io/projected/ef284948-85a5-432e-b6a6-a57f988f02fc-kube-api-access-t9lnt\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.901018 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.929807 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") " pod="openstack/glance-default-external-api-0" Sep 29 14:04:26 crc kubenswrapper[4634]: I0929 14:04:26.950423 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:04:27 crc kubenswrapper[4634]: I0929 
14:04:27.002573 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:04:27 crc kubenswrapper[4634]: I0929 14:04:27.254911 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:28 crc kubenswrapper[4634]: I0929 14:04:28.122060 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b547ae5d-71f9-43ee-be14-a9cd2f1c1de7" path="/var/lib/kubelet/pods/b547ae5d-71f9-43ee-be14-a9cd2f1c1de7/volumes" Sep 29 14:04:28 crc kubenswrapper[4634]: I0929 14:04:28.122873 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e98db531-bbba-41b4-97df-f94db814d3f9" path="/var/lib/kubelet/pods/e98db531-bbba-41b4-97df-f94db814d3f9/volumes" Sep 29 14:04:28 crc kubenswrapper[4634]: E0929 14:04:28.429048 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 29 14:04:28 crc kubenswrapper[4634]: E0929 14:04:28.429902 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k5tsd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]Contai
nerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-dsvdn_openstack(519a22f3-5513-430a-bd2b-6670ece06c2d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:04:28 crc kubenswrapper[4634]: E0929 14:04:28.431493 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-dsvdn" podUID="519a22f3-5513-430a-bd2b-6670ece06c2d" Sep 29 14:04:28 crc kubenswrapper[4634]: I0929 14:04:28.469536 4634 scope.go:117] "RemoveContainer" containerID="ddabd6bed2321f27da0b21c9efd68867963baf78d7a6ad82302571efc27db04a" Sep 29 14:04:28 crc kubenswrapper[4634]: E0929 14:04:28.541254 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-dsvdn" podUID="519a22f3-5513-430a-bd2b-6670ece06c2d" Sep 29 14:04:28 crc kubenswrapper[4634]: I0929 14:04:28.563174 4634 scope.go:117] "RemoveContainer" containerID="be8d8ca5bef9e9695e44efaa6d90f6af3964db0b69a9a85f630950ccdc37ba2a" Sep 29 14:04:28 crc kubenswrapper[4634]: I0929 14:04:28.653975 4634 scope.go:117] "RemoveContainer" containerID="6ead59276f1cebbcaa6da9b702c719228dde42778e42db8d830880c40b196f03" Sep 29 14:04:28 crc kubenswrapper[4634]: I0929 14:04:28.716526 4634 scope.go:117] "RemoveContainer" containerID="5fb50a50054d196899776672c765e4a9a52dde31bcb09b265408ee57556d1918" Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.020060 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-587bf8586b-wjkjk"] Sep 29 14:04:29 crc kubenswrapper[4634]: W0929 14:04:29.023789 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf725a01f_c382_4260_8e4e_e530d7c0ed82.slice/crio-25240d6caf08974f67a1863b8c1f19a7215c1da2a0225b5f8c2fdbc918bd326f WatchSource:0}: Error finding container 25240d6caf08974f67a1863b8c1f19a7215c1da2a0225b5f8c2fdbc918bd326f: Status 404 returned error can't find the container with id 25240d6caf08974f67a1863b8c1f19a7215c1da2a0225b5f8c2fdbc918bd326f Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.219866 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:04:29 crc kubenswrapper[4634]: W0929 14:04:29.238769 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b2378f2_6227_4121_b9cc_a8ecbda2ff6b.slice/crio-827ec655154c4ef7f8c378435c16daf563b1488578cf4089da803fc5fdbb3d4f WatchSource:0}: Error finding container 827ec655154c4ef7f8c378435c16daf563b1488578cf4089da803fc5fdbb3d4f: Status 404 returned error can't find the container with id 827ec655154c4ef7f8c378435c16daf563b1488578cf4089da803fc5fdbb3d4f Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.543584 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b","Type":"ContainerStarted","Data":"827ec655154c4ef7f8c378435c16daf563b1488578cf4089da803fc5fdbb3d4f"} Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.550415 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-db-sync-6927h" event={"ID":"a5cbaa37-b66c-4549-9ccd-e9ba5771038a","Type":"ContainerStarted","Data":"5e2afff12bdd152a5f432cbdd8f291fe2e553a524d5bd99a629b9c68ad24d2cd"} Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.559688 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-587bf8586b-wjkjk" event={"ID":"f725a01f-c382-4260-8e4e-e530d7c0ed82","Type":"ContainerStarted","Data":"281e09991b4f0d8b56163cb24a567fec92bde008a18bdbd3efcbaa83d720a490"} Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.559769 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-587bf8586b-wjkjk" event={"ID":"f725a01f-c382-4260-8e4e-e530d7c0ed82","Type":"ContainerStarted","Data":"25240d6caf08974f67a1863b8c1f19a7215c1da2a0225b5f8c2fdbc918bd326f"} Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.560285 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.567393 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-577dn" event={"ID":"1c1084d0-17b1-40a1-b57e-11e41ad8db3b","Type":"ContainerStarted","Data":"99221a0eaa7e0a10c48f6c4f5cada96e65e5b31dbcfab1eb29288d0f72b7ea56"} Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.583211 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-6927h" podStartSLOduration=13.893912973 podStartE2EDuration="53.583182527s" podCreationTimestamp="2025-09-29 14:03:36 +0000 UTC" firstStartedPulling="2025-09-29 14:03:48.693506477 +0000 UTC m=+1159.262234226" lastFinishedPulling="2025-09-29 14:04:28.382776031 +0000 UTC m=+1198.951503780" observedRunningTime="2025-09-29 14:04:29.572845977 +0000 UTC m=+1200.141573726" watchObservedRunningTime="2025-09-29 14:04:29.583182527 +0000 UTC m=+1200.151910276" Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.615118 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-577dn" podStartSLOduration=3.36401942 podStartE2EDuration="1m7.615069926s" podCreationTimestamp="2025-09-29 14:03:22 +0000 UTC" firstStartedPulling="2025-09-29 14:03:24.243326447 +0000 UTC m=+1134.812054196" lastFinishedPulling="2025-09-29 14:04:28.494376953 +0000 UTC m=+1199.063104702" observedRunningTime="2025-09-29 14:04:29.595516521 +0000 UTC m=+1200.164244270" watchObservedRunningTime="2025-09-29 14:04:29.615069926 +0000 UTC m=+1200.183797675" Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.670153 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-587bf8586b-wjkjk" podStartSLOduration=18.67011991 podStartE2EDuration="18.67011991s" podCreationTimestamp="2025-09-29 14:04:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:29.618977532 +0000 UTC m=+1200.187705281" watchObservedRunningTime="2025-09-29 14:04:29.67011991 +0000 UTC m=+1200.238847659" Sep 29 14:04:29 crc kubenswrapper[4634]: I0929 14:04:29.799308 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:04:30 crc kubenswrapper[4634]: I0929 14:04:30.591903 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b","Type":"ContainerStarted","Data":"43b95836ae5ded67751cfa89e26f9bc1af003788323af2b3fce3cebe37de45d1"} Sep 29 14:04:32 crc kubenswrapper[4634]: I0929 14:04:32.621772 4634 generic.go:334] "Generic (PLEG): container finished" podID="aac1358d-d39f-4732-97fa-cc8947c81bdb" containerID="bb5673135f867de6d57893626f372f653b907692d0d78c44e581635e7e400137" exitCode=0 Sep 29 14:04:32 crc kubenswrapper[4634]: I0929 14:04:32.621941 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bqjnz" event={"ID":"aac1358d-d39f-4732-97fa-cc8947c81bdb","Type":"ContainerDied","Data":"bb5673135f867de6d57893626f372f653b907692d0d78c44e581635e7e400137"} Sep 29 14:04:33 crc kubenswrapper[4634]: I0929 14:04:33.633542 4634 generic.go:334] "Generic (PLEG): container finished" podID="1c1084d0-17b1-40a1-b57e-11e41ad8db3b" containerID="99221a0eaa7e0a10c48f6c4f5cada96e65e5b31dbcfab1eb29288d0f72b7ea56" exitCode=0 Sep 29 14:04:33 crc kubenswrapper[4634]: I0929 14:04:33.633653 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-577dn" event={"ID":"1c1084d0-17b1-40a1-b57e-11e41ad8db3b","Type":"ContainerDied","Data":"99221a0eaa7e0a10c48f6c4f5cada96e65e5b31dbcfab1eb29288d0f72b7ea56"} Sep 29 14:04:33 crc kubenswrapper[4634]: I0929 14:04:33.636399 4634 generic.go:334] "Generic (PLEG): container finished" podID="a5cbaa37-b66c-4549-9ccd-e9ba5771038a" containerID="5e2afff12bdd152a5f432cbdd8f291fe2e553a524d5bd99a629b9c68ad24d2cd" exitCode=0 Sep 29 14:04:33 crc kubenswrapper[4634]: I0929 14:04:33.636611 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6927h" event={"ID":"a5cbaa37-b66c-4549-9ccd-e9ba5771038a","Type":"ContainerDied","Data":"5e2afff12bdd152a5f432cbdd8f291fe2e553a524d5bd99a629b9c68ad24d2cd"} Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.166369 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-bqjnz" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.271323 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9twx\" (UniqueName: \"kubernetes.io/projected/aac1358d-d39f-4732-97fa-cc8947c81bdb-kube-api-access-w9twx\") pod \"aac1358d-d39f-4732-97fa-cc8947c81bdb\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.271538 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-combined-ca-bundle\") pod \"aac1358d-d39f-4732-97fa-cc8947c81bdb\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.271679 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-config\") pod \"aac1358d-d39f-4732-97fa-cc8947c81bdb\" (UID: \"aac1358d-d39f-4732-97fa-cc8947c81bdb\") " Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.294059 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aac1358d-d39f-4732-97fa-cc8947c81bdb-kube-api-access-w9twx" (OuterVolumeSpecName: "kube-api-access-w9twx") pod "aac1358d-d39f-4732-97fa-cc8947c81bdb" (UID: "aac1358d-d39f-4732-97fa-cc8947c81bdb"). InnerVolumeSpecName "kube-api-access-w9twx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.299694 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-config" (OuterVolumeSpecName: "config") pod "aac1358d-d39f-4732-97fa-cc8947c81bdb" (UID: "aac1358d-d39f-4732-97fa-cc8947c81bdb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.312209 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aac1358d-d39f-4732-97fa-cc8947c81bdb" (UID: "aac1358d-d39f-4732-97fa-cc8947c81bdb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.361936 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.374388 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9twx\" (UniqueName: \"kubernetes.io/projected/aac1358d-d39f-4732-97fa-cc8947c81bdb-kube-api-access-w9twx\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.374428 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.374442 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/aac1358d-d39f-4732-97fa-cc8947c81bdb-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.378470 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.651829 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ef284948-85a5-432e-b6a6-a57f988f02fc","Type":"ContainerStarted","Data":"dd4ebbc2cfd79ed3ba82d018fce73c409fd0a717cc9dd62678ab778c4c3bdb3f"} Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.661359 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-bqjnz" Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.669137 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-bqjnz" event={"ID":"aac1358d-d39f-4732-97fa-cc8947c81bdb","Type":"ContainerDied","Data":"0b85cd83b23fbc9bee435043dc5c4ebd744063a3d77b487b4a238d22124b428b"} Sep 29 14:04:34 crc kubenswrapper[4634]: I0929 14:04:34.669218 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b85cd83b23fbc9bee435043dc5c4ebd744063a3d77b487b4a238d22124b428b" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.104640 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-2vffv"] Sep 29 14:04:35 crc kubenswrapper[4634]: E0929 14:04:35.112466 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aac1358d-d39f-4732-97fa-cc8947c81bdb" containerName="neutron-db-sync" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.112491 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="aac1358d-d39f-4732-97fa-cc8947c81bdb" containerName="neutron-db-sync" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.112697 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="aac1358d-d39f-4732-97fa-cc8947c81bdb" containerName="neutron-db-sync" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.113761 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.232248 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-2vffv"] Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.352646 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwhrf\" (UniqueName: \"kubernetes.io/projected/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-kube-api-access-lwhrf\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.353036 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.353238 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.353364 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.353463 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.353541 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.456149 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwhrf\" (UniqueName: \"kubernetes.io/projected/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-kube-api-access-lwhrf\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.456199 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.456232 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.456274 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.456310 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.456609 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.458216 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-swift-storage-0\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.461265 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-nb\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.462378 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.462950 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-sb\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.467873 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-svc\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.489440 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-54f5455b48-gwkmt"] Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.491951 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.499498 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54f5455b48-gwkmt"] Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.507484 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-75qvh" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.507692 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.507772 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.507940 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.546570 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwhrf\" (UniqueName: \"kubernetes.io/projected/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-kube-api-access-lwhrf\") pod \"dnsmasq-dns-84b966f6c9-2vffv\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.666540 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm2c5\" (UniqueName: \"kubernetes.io/projected/2d7c0763-bf32-4ecb-b1bd-b989e492e943-kube-api-access-gm2c5\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.666615 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-ovndb-tls-certs\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.666651 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-config\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.666681 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-combined-ca-bundle\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.666728 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-httpd-config\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.769274 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-config\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.769363 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-combined-ca-bundle\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.769426 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-httpd-config\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.769504 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm2c5\" (UniqueName: \"kubernetes.io/projected/2d7c0763-bf32-4ecb-b1bd-b989e492e943-kube-api-access-gm2c5\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.769570 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-ovndb-tls-certs\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.777236 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-config\") pod \"neutron-54f5455b48-gwkmt\" (UID: 
\"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.777523 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-combined-ca-bundle\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.779117 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-httpd-config\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.798833 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-ovndb-tls-certs\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.803923 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm2c5\" (UniqueName: \"kubernetes.io/projected/2d7c0763-bf32-4ecb-b1bd-b989e492e943-kube-api-access-gm2c5\") pod \"neutron-54f5455b48-gwkmt\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") " pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.815530 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:35 crc kubenswrapper[4634]: I0929 14:04:35.883897 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:36 crc kubenswrapper[4634]: I0929 14:04:36.954096 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-577dn" Sep 29 14:04:36 crc kubenswrapper[4634]: I0929 14:04:36.965429 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-6927h" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.104504 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6pvg\" (UniqueName: \"kubernetes.io/projected/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-kube-api-access-l6pvg\") pod \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.106493 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-config-data\") pod \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.106554 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-db-sync-config-data\") pod \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.106707 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7hbf\" (UniqueName: \"kubernetes.io/projected/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-kube-api-access-v7hbf\") pod \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.106812 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-combined-ca-bundle\") pod \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.106900 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-logs\") pod \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.107012 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-combined-ca-bundle\") pod \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\" (UID: \"a5cbaa37-b66c-4549-9ccd-e9ba5771038a\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.107038 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-scripts\") pod \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\" (UID: \"1c1084d0-17b1-40a1-b57e-11e41ad8db3b\") " Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.111875 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a5cbaa37-b66c-4549-9ccd-e9ba5771038a" (UID: "a5cbaa37-b66c-4549-9ccd-e9ba5771038a"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.125752 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-kube-api-access-l6pvg" (OuterVolumeSpecName: "kube-api-access-l6pvg") pod "1c1084d0-17b1-40a1-b57e-11e41ad8db3b" (UID: "1c1084d0-17b1-40a1-b57e-11e41ad8db3b"). InnerVolumeSpecName "kube-api-access-l6pvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.126528 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-logs" (OuterVolumeSpecName: "logs") pod "1c1084d0-17b1-40a1-b57e-11e41ad8db3b" (UID: "1c1084d0-17b1-40a1-b57e-11e41ad8db3b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.135204 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-kube-api-access-v7hbf" (OuterVolumeSpecName: "kube-api-access-v7hbf") pod "a5cbaa37-b66c-4549-9ccd-e9ba5771038a" (UID: "a5cbaa37-b66c-4549-9ccd-e9ba5771038a"). InnerVolumeSpecName "kube-api-access-v7hbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.144219 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-scripts" (OuterVolumeSpecName: "scripts") pod "1c1084d0-17b1-40a1-b57e-11e41ad8db3b" (UID: "1c1084d0-17b1-40a1-b57e-11e41ad8db3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.164337 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.164374 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6pvg\" (UniqueName: \"kubernetes.io/projected/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-kube-api-access-l6pvg\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.164388 4634 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.164397 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7hbf\" (UniqueName: \"kubernetes.io/projected/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-kube-api-access-v7hbf\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.164406 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.180234 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-config-data" (OuterVolumeSpecName: "config-data") pod "1c1084d0-17b1-40a1-b57e-11e41ad8db3b" (UID: "1c1084d0-17b1-40a1-b57e-11e41ad8db3b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.213785 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5cbaa37-b66c-4549-9ccd-e9ba5771038a" (UID: "a5cbaa37-b66c-4549-9ccd-e9ba5771038a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.243468 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c1084d0-17b1-40a1-b57e-11e41ad8db3b" (UID: "1c1084d0-17b1-40a1-b57e-11e41ad8db3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.265739 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.268617 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.268652 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5cbaa37-b66c-4549-9ccd-e9ba5771038a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.268662 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c1084d0-17b1-40a1-b57e-11e41ad8db3b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:37 crc kubenswrapper[4634]: E0929 14:04:37.445349 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.532379 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7f758ffcf7-qsxtz"] Sep 29 14:04:37 crc kubenswrapper[4634]: E0929 14:04:37.532855 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5cbaa37-b66c-4549-9ccd-e9ba5771038a" containerName="barbican-db-sync" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.532874 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5cbaa37-b66c-4549-9ccd-e9ba5771038a" containerName="barbican-db-sync" Sep 29 14:04:37 crc kubenswrapper[4634]: E0929 14:04:37.532912 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c1084d0-17b1-40a1-b57e-11e41ad8db3b" containerName="placement-db-sync" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.532923 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c1084d0-17b1-40a1-b57e-11e41ad8db3b" containerName="placement-db-sync" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.533137 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c1084d0-17b1-40a1-b57e-11e41ad8db3b" 
containerName="placement-db-sync" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.533156 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5cbaa37-b66c-4549-9ccd-e9ba5771038a" containerName="barbican-db-sync" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.534184 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.551997 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.552278 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.566262 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f758ffcf7-qsxtz"] Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.574442 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-5d5866c49b-9tt6g" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.587479 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdrtl\" (UniqueName: \"kubernetes.io/projected/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-kube-api-access-qdrtl\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.587593 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-combined-ca-bundle\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.587708 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-config\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.587734 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-internal-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.587765 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-httpd-config\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.587816 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-ovndb-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.587862 
4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-public-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.671919 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7bbc59f76-j4rjv"] Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.689892 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-ovndb-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.689939 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-public-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.690008 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdrtl\" (UniqueName: \"kubernetes.io/projected/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-kube-api-access-qdrtl\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.690042 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-combined-ca-bundle\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.690120 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-config\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.690143 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-internal-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.690166 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-httpd-config\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.711101 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-httpd-config\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc 
kubenswrapper[4634]: I0929 14:04:37.712298 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-config\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.724493 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6927h" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.730525 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-combined-ca-bundle\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.730720 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6927h" event={"ID":"a5cbaa37-b66c-4549-9ccd-e9ba5771038a","Type":"ContainerDied","Data":"84db524cd6190624d16007cedb8f95897645e879e3b2cf73dec0ecbc6c4be684"} Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.730757 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84db524cd6190624d16007cedb8f95897645e879e3b2cf73dec0ecbc6c4be684" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.739069 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdrtl\" (UniqueName: \"kubernetes.io/projected/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-kube-api-access-qdrtl\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.756279 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-ovndb-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.759516 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-577dn" event={"ID":"1c1084d0-17b1-40a1-b57e-11e41ad8db3b","Type":"ContainerDied","Data":"0cdd329c87c0cd98dbd1e4e99b8f26f6452ec5b88fe120e9a499eb2092713447"} Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.760059 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cdd329c87c0cd98dbd1e4e99b8f26f6452ec5b88fe120e9a499eb2092713447" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.760379 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-577dn" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.760685 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-public-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.761447 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2e0453ac-a888-4906-8a1f-9ba9a0f797e0-internal-tls-certs\") pod \"neutron-7f758ffcf7-qsxtz\" (UID: \"2e0453ac-a888-4906-8a1f-9ba9a0f797e0\") " pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.764602 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-2vffv"] Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.823379 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon-log" containerID="cri-o://7dbc773a851935823ae057d5f45287b8cb997b84d3d50b9ecf58704e2fd62dd0" gracePeriod=30 Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.824885 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="ceilometer-notification-agent" containerID="cri-o://0048fa50e8fa68718e3de30d8b2da6272a12b039e412b574a35f901fb82a98a5" gracePeriod=30 Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.826702 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon" containerID="cri-o://fd599d2a47dc10f2251e1e9bfc707c55aaa8162d18f2e03f6679c324016f8548" gracePeriod=30 Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.828786 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"061c94cb-cc6c-4a14-a0c4-4bcef38173b7","Type":"ContainerStarted","Data":"f3a0d79b872cc4c22a86229d575f404459460f94214c1109ef0ffee698ef1de4"} Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.829736 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="proxy-httpd" containerID="cri-o://f3a0d79b872cc4c22a86229d575f404459460f94214c1109ef0ffee698ef1de4" gracePeriod=30 Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.830003 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.833406 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-54f5455b48-gwkmt"] Sep 29 14:04:37 crc kubenswrapper[4634]: I0929 14:04:37.888579 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.156428 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5bfb7db698-tmn8x"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.165014 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.170529 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.178794 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.181504 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.182211 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5bfb7db698-tmn8x"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.183312 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.183568 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-qw9np" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.348122 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-combined-ca-bundle\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.348172 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-internal-tls-certs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.348216 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-config-data\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.348267 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b38f115-526d-4093-b79c-19e6b9258dbf-logs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.348305 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67qlc\" (UniqueName: \"kubernetes.io/projected/2b38f115-526d-4093-b79c-19e6b9258dbf-kube-api-access-67qlc\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.348337 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-public-tls-certs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 
14:04:38.348390 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-scripts\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.376177 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-645d46567c-ngxbx"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.377838 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.406724 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.407276 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-brpbz" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.407400 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.441331 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-645d46567c-ngxbx"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.450392 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b38f115-526d-4093-b79c-19e6b9258dbf-logs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.450615 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67qlc\" (UniqueName: \"kubernetes.io/projected/2b38f115-526d-4093-b79c-19e6b9258dbf-kube-api-access-67qlc\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.450717 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-public-tls-certs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.450831 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-scripts\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.450937 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-combined-ca-bundle\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.451019 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-internal-tls-certs\") pod 
\"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.458154 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b38f115-526d-4093-b79c-19e6b9258dbf-logs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.465959 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-config-data\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.489375 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-5658b5d69b-mlcxf"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.491739 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.512725 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.512726 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-public-tls-certs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.519992 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-scripts\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.520973 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-combined-ca-bundle\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.521310 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5658b5d69b-mlcxf"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.521596 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-config-data\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.525894 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b38f115-526d-4093-b79c-19e6b9258dbf-internal-tls-certs\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.543438 4634 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67qlc\" (UniqueName: \"kubernetes.io/projected/2b38f115-526d-4093-b79c-19e6b9258dbf-kube-api-access-67qlc\") pod \"placement-5bfb7db698-tmn8x\" (UID: \"2b38f115-526d-4093-b79c-19e6b9258dbf\") " pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.572665 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-config-data\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.572845 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c697d1f5-42d0-4a87-9704-64a6e1406db1-logs\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.572957 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg9ch\" (UniqueName: \"kubernetes.io/projected/c697d1f5-42d0-4a87-9704-64a6e1406db1-kube-api-access-sg9ch\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.573061 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-combined-ca-bundle\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.573172 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-config-data-custom\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.594373 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-2vffv"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.663987 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jddgc"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.676497 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8vk6\" (UniqueName: \"kubernetes.io/projected/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-kube-api-access-s8vk6\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.676608 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-config-data\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " 
pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677290 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-config-data-custom\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677334 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-combined-ca-bundle\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677386 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c697d1f5-42d0-4a87-9704-64a6e1406db1-logs\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677451 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg9ch\" (UniqueName: \"kubernetes.io/projected/c697d1f5-42d0-4a87-9704-64a6e1406db1-kube-api-access-sg9ch\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677481 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-logs\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677511 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-combined-ca-bundle\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677529 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-config-data\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.677555 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-config-data-custom\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.679430 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/c697d1f5-42d0-4a87-9704-64a6e1406db1-logs\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.679661 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.682263 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-config-data\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.695534 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-config-data-custom\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.708700 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg9ch\" (UniqueName: \"kubernetes.io/projected/c697d1f5-42d0-4a87-9704-64a6e1406db1-kube-api-access-sg9ch\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.723298 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jddgc"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.729997 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c697d1f5-42d0-4a87-9704-64a6e1406db1-combined-ca-bundle\") pod \"barbican-worker-645d46567c-ngxbx\" (UID: \"c697d1f5-42d0-4a87-9704-64a6e1406db1\") " pod="openstack/barbican-worker-645d46567c-ngxbx" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.748292 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6b7c5bc546-zfttb"] Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.759606 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6b7c5bc546-zfttb" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.770366 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.779752 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.779845 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8vk6\" (UniqueName: \"kubernetes.io/projected/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-kube-api-access-s8vk6\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.779871 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-config-data-custom\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.779894 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-combined-ca-bundle\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.779939 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84cgv\" (UniqueName: \"kubernetes.io/projected/d4baf450-6210-4b45-9c46-810b190b1741-kube-api-access-84cgv\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.779983 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-logs\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.780000 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-config\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.780027 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-config-data\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" 
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.780049 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.780069 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.780120 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.791070 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-logs\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.796919 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-combined-ca-bundle\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.814655 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-config-data-custom\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.821407 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8vk6\" (UniqueName: \"kubernetes.io/projected/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-kube-api-access-s8vk6\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.830229 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b7c5bc546-zfttb"]
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.835890 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e86a12-7d7e-4bbe-bcf2-030f754a91a2-config-data\") pod \"barbican-keystone-listener-5658b5d69b-mlcxf\" (UID: \"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2\") " pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.837377 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5bfb7db698-tmn8x"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.850912 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883230 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883287 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883317 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883362 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883389 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-logs\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883433 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data-custom\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883473 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvzpp\" (UniqueName: \"kubernetes.io/projected/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-kube-api-access-xvzpp\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883501 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-combined-ca-bundle\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883524 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84cgv\" (UniqueName: \"kubernetes.io/projected/d4baf450-6210-4b45-9c46-810b190b1741-kube-api-access-84cgv\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883548 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.883578 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-config\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.889728 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-config\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.890375 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.890902 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.908779 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.910944 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.923420 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ef284948-85a5-432e-b6a6-a57f988f02fc","Type":"ContainerStarted","Data":"ff5795357b5aa7fe51d9efa1132531504898e060de408f369c5fad52016d304d"}
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.938508 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84cgv\" (UniqueName: \"kubernetes.io/projected/d4baf450-6210-4b45-9c46-810b190b1741-kube-api-access-84cgv\") pod \"dnsmasq-dns-75c8ddd69c-jddgc\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.939710 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" event={"ID":"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40","Type":"ContainerStarted","Data":"b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739"}
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.939771 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" event={"ID":"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40","Type":"ContainerStarted","Data":"9a520bd0dd1c72b5e746cf721c57a66384d882430481f068a20fff53b1447bdc"}
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.939964 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" podUID="75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" containerName="init" containerID="cri-o://b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739" gracePeriod=10
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.951642 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7f758ffcf7-qsxtz"]
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.971410 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54f5455b48-gwkmt" event={"ID":"2d7c0763-bf32-4ecb-b1bd-b989e492e943","Type":"ContainerStarted","Data":"234b74c8bccfa87df5a4ee49d1a3189478c634ddd98b00ae4dd119a84e8b2908"}
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.971774 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54f5455b48-gwkmt" event={"ID":"2d7c0763-bf32-4ecb-b1bd-b989e492e943","Type":"ContainerStarted","Data":"ddffcd9c479eaf360ebbce8ddf24df626192e6ff0e065d44b3f7eeea0939b7ad"}
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.985804 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.986164 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-logs\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.986340 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data-custom\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.986471 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvzpp\" (UniqueName: \"kubernetes.io/projected/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-kube-api-access-xvzpp\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.986580 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-combined-ca-bundle\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.990276 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-logs\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:38 crc kubenswrapper[4634]: W0929 14:04:38.995380 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e0453ac_a888_4906_8a1f_9ba9a0f797e0.slice/crio-278a0e5b6758fa7b56d90a3057e3710fc0cfee804e14b4a461e9805dcd5c74f2 WatchSource:0}: Error finding container 278a0e5b6758fa7b56d90a3057e3710fc0cfee804e14b4a461e9805dcd5c74f2: Status 404 returned error can't find the container with id 278a0e5b6758fa7b56d90a3057e3710fc0cfee804e14b4a461e9805dcd5c74f2
Sep 29 14:04:38 crc kubenswrapper[4634]: I0929 14:04:38.999682 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-combined-ca-bundle\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.007055 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.007873 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data-custom\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.014897 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b","Type":"ContainerStarted","Data":"d693106158ca88657de75dd1486e13e25c292e0a766989d4b2a7bd85fbd84a1a"}
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.019731 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-645d46567c-ngxbx"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.024708 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvzpp\" (UniqueName: \"kubernetes.io/projected/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-kube-api-access-xvzpp\") pod \"barbican-api-6b7c5bc546-zfttb\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") " pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.062969 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=13.062949273 podStartE2EDuration="13.062949273s" podCreationTimestamp="2025-09-29 14:04:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:39.048168974 +0000 UTC m=+1209.616896723" watchObservedRunningTime="2025-09-29 14:04:39.062949273 +0000 UTC m=+1209.631677022"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.197826 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.288020 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.494442 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv"
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.573492 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config\") pod \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") "
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.574015 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-sb\") pod \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") "
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.574156 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-swift-storage-0\") pod \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") "
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.574380 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-nb\") pod \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") "
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.574429 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-svc\") pod \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") "
Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.574493 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwhrf\" (UniqueName: 
\"kubernetes.io/projected/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-kube-api-access-lwhrf\") pod \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.613986 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-kube-api-access-lwhrf" (OuterVolumeSpecName: "kube-api-access-lwhrf") pod "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" (UID: "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"). InnerVolumeSpecName "kube-api-access-lwhrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.676873 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwhrf\" (UniqueName: \"kubernetes.io/projected/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-kube-api-access-lwhrf\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.775590 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" (UID: "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.779329 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5bfb7db698-tmn8x"] Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.782236 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config" (OuterVolumeSpecName: "config") pod "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" (UID: "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.798285 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config\") pod \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\" (UID: \"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40\") " Sep 29 14:04:39 crc kubenswrapper[4634]: W0929 14:04:39.800187 4634 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40/volumes/kubernetes.io~configmap/config Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.800210 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config" (OuterVolumeSpecName: "config") pod "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" (UID: "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.802537 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.802921 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.884325 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" (UID: "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.904891 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.914117 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-5658b5d69b-mlcxf"] Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.947619 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" (UID: "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:39 crc kubenswrapper[4634]: I0929 14:04:39.948776 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" (UID: "75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:39 crc kubenswrapper[4634]: W0929 14:04:39.955744 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1e86a12_7d7e_4bbe_bcf2_030f754a91a2.slice/crio-18e760215dfbafc1e3bc07f7216812eba92a086823d58d8c60210a892f819aae WatchSource:0}: Error finding container 18e760215dfbafc1e3bc07f7216812eba92a086823d58d8c60210a892f819aae: Status 404 returned error can't find the container with id 18e760215dfbafc1e3bc07f7216812eba92a086823d58d8c60210a892f819aae Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.005784 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-645d46567c-ngxbx"] Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.010894 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.010927 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.069134 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54f5455b48-gwkmt" event={"ID":"2d7c0763-bf32-4ecb-b1bd-b989e492e943","Type":"ContainerStarted","Data":"c99e5c0faef3b7c8c5ecbf0919ee165ee2274be3982d3263030eb303399e6d9d"} Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.071577 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.094449 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" event={"ID":"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2","Type":"ContainerStarted","Data":"18e760215dfbafc1e3bc07f7216812eba92a086823d58d8c60210a892f819aae"} Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.127326 4634 generic.go:334] "Generic (PLEG): container finished" podID="75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" containerID="b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739" exitCode=0 Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.127447 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.176663 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-54f5455b48-gwkmt" podStartSLOduration=5.17663768 podStartE2EDuration="5.17663768s" podCreationTimestamp="2025-09-29 14:04:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:40.102630664 +0000 UTC m=+1210.671358413" watchObservedRunningTime="2025-09-29 14:04:40.17663768 +0000 UTC m=+1210.745365429" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.358820 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" event={"ID":"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40","Type":"ContainerDied","Data":"b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739"} Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.359247 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84b966f6c9-2vffv" event={"ID":"75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40","Type":"ContainerDied","Data":"9a520bd0dd1c72b5e746cf721c57a66384d882430481f068a20fff53b1447bdc"} Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.359262 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bfb7db698-tmn8x" event={"ID":"2b38f115-526d-4093-b79c-19e6b9258dbf","Type":"ContainerStarted","Data":"75433ca60ea263fdb7999b5f4d925c0601d460adafbcd912d8698c24db374b05"} Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.359291 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f758ffcf7-qsxtz" event={"ID":"2e0453ac-a888-4906-8a1f-9ba9a0f797e0","Type":"ContainerStarted","Data":"5ee9adfa2d533692ecf1bc1384eb71fe808143e72a3652687151c438a891ff0d"} Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.359305 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f758ffcf7-qsxtz" event={"ID":"2e0453ac-a888-4906-8a1f-9ba9a0f797e0","Type":"ContainerStarted","Data":"278a0e5b6758fa7b56d90a3057e3710fc0cfee804e14b4a461e9805dcd5c74f2"} Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.359378 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jddgc"] Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.359395 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6b7c5bc546-zfttb"] Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.359418 4634 scope.go:117] "RemoveContainer" containerID="b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.444570 4634 scope.go:117] "RemoveContainer" containerID="b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739" Sep 29 14:04:40 crc kubenswrapper[4634]: E0929 14:04:40.446298 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739\": container with ID starting with b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739 not found: ID does not exist" containerID="b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.446420 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739"} err="failed to get container status \"b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739\": rpc error: code = NotFound desc = could not find container \"b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739\": container with ID starting with b517042abca4bf813cae3fdf2ef05f093936932d5fbaee8a16bfdd7448264739 not found: ID does not exist" Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.645863 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-2vffv"] Sep 29 14:04:40 crc kubenswrapper[4634]: I0929 14:04:40.662396 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84b966f6c9-2vffv"] Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.211309 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ef284948-85a5-432e-b6a6-a57f988f02fc","Type":"ContainerStarted","Data":"2927dd651f052970082f59949258c928b3afb3883520ecfdd71d09f10a29dee0"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.241500 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bfb7db698-tmn8x" event={"ID":"2b38f115-526d-4093-b79c-19e6b9258dbf","Type":"ContainerStarted","Data":"004b5ab7a91c28b212aa1db56365521d5b4a954a34b94c3cb76753f71ee4b3eb"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.299142 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7f758ffcf7-qsxtz" event={"ID":"2e0453ac-a888-4906-8a1f-9ba9a0f797e0","Type":"ContainerStarted","Data":"d642fe726ddefd060e408ecafc2d0870df696871d6d03390ff770f708c4eb751"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.300559 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.336588 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" event={"ID":"d4baf450-6210-4b45-9c46-810b190b1741","Type":"ContainerStarted","Data":"0f6c9c0c9bab68227dc882d7aaa3f2bc6901e6fbba9bf5c926069958734387ff"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.336642 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" event={"ID":"d4baf450-6210-4b45-9c46-810b190b1741","Type":"ContainerStarted","Data":"603d4ddf94e89a69d92f9dc672a7cbc03008bff2c54ae282877ff7af03d34cd6"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.367813 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7f758ffcf7-qsxtz" podStartSLOduration=4.367789869 podStartE2EDuration="4.367789869s" podCreationTimestamp="2025-09-29 14:04:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:41.357605933 +0000 UTC m=+1211.926333682" watchObservedRunningTime="2025-09-29 14:04:41.367789869 +0000 UTC m=+1211.936517618" Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.372977 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=15.372970544 podStartE2EDuration="15.372970544s" podCreationTimestamp="2025-09-29 14:04:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 
14:04:41.259707306 +0000 UTC m=+1211.828435065" watchObservedRunningTime="2025-09-29 14:04:41.372970544 +0000 UTC m=+1211.941698283" Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.386444 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b7c5bc546-zfttb" event={"ID":"845a9190-05ae-4893-a7e3-d4bcaa7d8d53","Type":"ContainerStarted","Data":"04ad20588640ae84f0a4b506764f8f9f7858baefcada5c481aebc89d21c4b0fc"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.386485 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b7c5bc546-zfttb" event={"ID":"845a9190-05ae-4893-a7e3-d4bcaa7d8d53","Type":"ContainerStarted","Data":"53c15d0993634bdaa999eef9b9dbfcaa9790084c3c51944263481692cfb618a6"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.409451 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-645d46567c-ngxbx" event={"ID":"c697d1f5-42d0-4a87-9704-64a6e1406db1","Type":"ContainerStarted","Data":"71f2f40a3f9ff64856e4b6a1c61a4ae3c83c3932e024a044ffb2d95d0e973309"} Sep 29 14:04:41 crc kubenswrapper[4634]: I0929 14:04:41.596431 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.132572 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" path="/var/lib/kubelet/pods/75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40/volumes" Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.427866 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b7c5bc546-zfttb" event={"ID":"845a9190-05ae-4893-a7e3-d4bcaa7d8d53","Type":"ContainerStarted","Data":"bcdb05a5274428257047e4b7630d9d63c64ed44adcdbd3938884f86db208a259"} Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.429349 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b7c5bc546-zfttb" Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.429389 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b7c5bc546-zfttb" Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.447780 4634 generic.go:334] "Generic (PLEG): container finished" podID="1427174f-e673-4c3e-bf36-f1463327fd61" containerID="fd599d2a47dc10f2251e1e9bfc707c55aaa8162d18f2e03f6679c324016f8548" exitCode=0 Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.447905 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bbc59f76-j4rjv" event={"ID":"1427174f-e673-4c3e-bf36-f1463327fd61","Type":"ContainerDied","Data":"fd599d2a47dc10f2251e1e9bfc707c55aaa8162d18f2e03f6679c324016f8548"} Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.449416 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6b7c5bc546-zfttb" podStartSLOduration=4.449390932 podStartE2EDuration="4.449390932s" podCreationTimestamp="2025-09-29 14:04:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:42.448344639 +0000 UTC m=+1213.017072388" watchObservedRunningTime="2025-09-29 14:04:42.449390932 +0000 UTC m=+1213.018118671" Sep 29 14:04:42 crc 
kubenswrapper[4634]: I0929 14:04:42.486601 4634 generic.go:334] "Generic (PLEG): container finished" podID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerID="0048fa50e8fa68718e3de30d8b2da6272a12b039e412b574a35f901fb82a98a5" exitCode=0 Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.486709 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"061c94cb-cc6c-4a14-a0c4-4bcef38173b7","Type":"ContainerDied","Data":"0048fa50e8fa68718e3de30d8b2da6272a12b039e412b574a35f901fb82a98a5"} Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.499282 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5bfb7db698-tmn8x" event={"ID":"2b38f115-526d-4093-b79c-19e6b9258dbf","Type":"ContainerStarted","Data":"323fe2a9c5bb070d1cda5a13883c6c5d5e4898b8aafeafbb90227dd0930c09d6"} Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.500944 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.501062 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.510143 4634 generic.go:334] "Generic (PLEG): container finished" podID="d4baf450-6210-4b45-9c46-810b190b1741" containerID="0f6c9c0c9bab68227dc882d7aaa3f2bc6901e6fbba9bf5c926069958734387ff" exitCode=0 Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.510226 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" event={"ID":"d4baf450-6210-4b45-9c46-810b190b1741","Type":"ContainerDied","Data":"0f6c9c0c9bab68227dc882d7aaa3f2bc6901e6fbba9bf5c926069958734387ff"} Sep 29 14:04:42 crc kubenswrapper[4634]: I0929 14:04:42.574611 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5bfb7db698-tmn8x" podStartSLOduration=4.574580026 podStartE2EDuration="4.574580026s" podCreationTimestamp="2025-09-29 14:04:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:42.542201427 +0000 UTC m=+1213.110929176" watchObservedRunningTime="2025-09-29 14:04:42.574580026 +0000 UTC m=+1213.143307775" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.441222 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-77c768456b-27trs"] Sep 29 14:04:43 crc kubenswrapper[4634]: E0929 14:04:43.441900 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" containerName="init" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.441914 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" containerName="init" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.442148 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40" containerName="init" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.443198 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.449396 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.450207 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.519839 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-77c768456b-27trs"] Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.546791 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf926\" (UniqueName: \"kubernetes.io/projected/4a923c46-c064-4dbd-b91d-cc1379e39d35-kube-api-access-wf926\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.546861 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-config-data\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.546919 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-config-data-custom\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.546965 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-internal-tls-certs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.547043 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-combined-ca-bundle\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.547161 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-public-tls-certs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.547228 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a923c46-c064-4dbd-b91d-cc1379e39d35-logs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.649636 4634 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-wf926\" (UniqueName: \"kubernetes.io/projected/4a923c46-c064-4dbd-b91d-cc1379e39d35-kube-api-access-wf926\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.649706 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-config-data\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.649788 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-config-data-custom\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.649872 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-internal-tls-certs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.649889 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-combined-ca-bundle\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.649999 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-public-tls-certs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.650098 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a923c46-c064-4dbd-b91d-cc1379e39d35-logs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.656122 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a923c46-c064-4dbd-b91d-cc1379e39d35-logs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.660362 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-public-tls-certs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.665785 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-internal-tls-certs\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.666876 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-combined-ca-bundle\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.671753 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf926\" (UniqueName: \"kubernetes.io/projected/4a923c46-c064-4dbd-b91d-cc1379e39d35-kube-api-access-wf926\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.679461 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-config-data-custom\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.684387 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a923c46-c064-4dbd-b91d-cc1379e39d35-config-data\") pod \"barbican-api-77c768456b-27trs\" (UID: \"4a923c46-c064-4dbd-b91d-cc1379e39d35\") " pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:43 crc kubenswrapper[4634]: I0929 14:04:43.775768 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:45 crc kubenswrapper[4634]: I0929 14:04:45.029229 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-77c768456b-27trs"] Sep 29 14:04:45 crc kubenswrapper[4634]: I0929 14:04:45.590016 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-645d46567c-ngxbx" event={"ID":"c697d1f5-42d0-4a87-9704-64a6e1406db1","Type":"ContainerStarted","Data":"b0aa37371fd4d2e1c176094bc279defd01ce03872dd37cd3d3f36e309e5d5d55"} Sep 29 14:04:45 crc kubenswrapper[4634]: I0929 14:04:45.592527 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" event={"ID":"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2","Type":"ContainerStarted","Data":"3c975be036a06fc12148abf1eb81cf0d255748b4b98c2d5800c870936d535b23"} Sep 29 14:04:45 crc kubenswrapper[4634]: I0929 14:04:45.595310 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" event={"ID":"d4baf450-6210-4b45-9c46-810b190b1741","Type":"ContainerStarted","Data":"f5b0a49a7b4b03ac4db641c9c08dac48d13440ce16581479ce0d0f1f8e241aab"} Sep 29 14:04:45 crc kubenswrapper[4634]: I0929 14:04:45.595530 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" Sep 29 14:04:45 crc kubenswrapper[4634]: I0929 14:04:45.598152 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77c768456b-27trs" event={"ID":"4a923c46-c064-4dbd-b91d-cc1379e39d35","Type":"ContainerStarted","Data":"e34ba015bad36c0a57520fc852ad83980d0d3375af5aa4d52ba1a4057e17529e"} Sep 29 14:04:45 crc kubenswrapper[4634]: I0929 14:04:45.629170 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" podStartSLOduration=7.629150176 podStartE2EDuration="7.629150176s" podCreationTimestamp="2025-09-29 14:04:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:45.621373743 +0000 UTC m=+1216.190101482" watchObservedRunningTime="2025-09-29 14:04:45.629150176 +0000 UTC m=+1216.197877925" Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.610136 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dsvdn" event={"ID":"519a22f3-5513-430a-bd2b-6670ece06c2d","Type":"ContainerStarted","Data":"b85849bb737513a06fb85cdc3cdd5d8819d2622400db7de04265d258e1723405"} Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.614649 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-645d46567c-ngxbx" event={"ID":"c697d1f5-42d0-4a87-9704-64a6e1406db1","Type":"ContainerStarted","Data":"8a71355edcd72c34cfa963334fbce129e2b41d4ff33568fc5d6c15e327d7997a"} Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.617678 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" event={"ID":"c1e86a12-7d7e-4bbe-bcf2-030f754a91a2","Type":"ContainerStarted","Data":"ed1de45d447e12131702289fb229b656721646faacaed836c1cc560776ae4837"} Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.619763 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77c768456b-27trs" event={"ID":"4a923c46-c064-4dbd-b91d-cc1379e39d35","Type":"ContainerStarted","Data":"5fd5b9b3a1458ca407fb6c8678adeefab5ea61547bb0b19132ef5c86b6ddd9ff"} Sep 29 14:04:46 crc 
kubenswrapper[4634]: I0929 14:04:46.619818 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-77c768456b-27trs" event={"ID":"4a923c46-c064-4dbd-b91d-cc1379e39d35","Type":"ContainerStarted","Data":"8695868b50e17c1068ad63a6772c964b811ca8c6db7c4e5191c24cb4c96bb2c9"} Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.656611 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-dsvdn" podStartSLOduration=21.715356442 podStartE2EDuration="1m17.656582204s" podCreationTimestamp="2025-09-29 14:03:29 +0000 UTC" firstStartedPulling="2025-09-29 14:03:48.29561057 +0000 UTC m=+1158.864338319" lastFinishedPulling="2025-09-29 14:04:44.236836332 +0000 UTC m=+1214.805564081" observedRunningTime="2025-09-29 14:04:46.640448086 +0000 UTC m=+1217.209175835" watchObservedRunningTime="2025-09-29 14:04:46.656582204 +0000 UTC m=+1217.225309953" Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.661802 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-645d46567c-ngxbx" podStartSLOduration=4.826877797 podStartE2EDuration="8.66178342s" podCreationTimestamp="2025-09-29 14:04:38 +0000 UTC" firstStartedPulling="2025-09-29 14:04:40.357530143 +0000 UTC m=+1210.926257892" lastFinishedPulling="2025-09-29 14:04:44.192435766 +0000 UTC m=+1214.761163515" observedRunningTime="2025-09-29 14:04:46.657994436 +0000 UTC m=+1217.226722185" watchObservedRunningTime="2025-09-29 14:04:46.66178342 +0000 UTC m=+1217.230511169" Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.784596 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-77c768456b-27trs" podStartSLOduration=3.784568601 podStartE2EDuration="3.784568601s" podCreationTimestamp="2025-09-29 14:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:04:46.702838244 +0000 UTC m=+1217.271565993" watchObservedRunningTime="2025-09-29 14:04:46.784568601 +0000 UTC m=+1217.353296360" Sep 29 14:04:46 crc kubenswrapper[4634]: I0929 14:04:46.791674 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-5658b5d69b-mlcxf" podStartSLOduration=4.538291689 podStartE2EDuration="8.791645658s" podCreationTimestamp="2025-09-29 14:04:38 +0000 UTC" firstStartedPulling="2025-09-29 14:04:39.964326918 +0000 UTC m=+1210.533054667" lastFinishedPulling="2025-09-29 14:04:44.217680887 +0000 UTC m=+1214.786408636" observedRunningTime="2025-09-29 14:04:46.743196241 +0000 UTC m=+1217.311924020" watchObservedRunningTime="2025-09-29 14:04:46.791645658 +0000 UTC m=+1217.360373407" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.003934 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.004026 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.047616 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.065010 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 
14:04:47.125619 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-587bf8586b-wjkjk" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.255323 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.255376 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.339346 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.389376 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.643187 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.643248 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.643265 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.643279 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.643296 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 14:04:47 crc kubenswrapper[4634]: I0929 14:04:47.643310 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.200291 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.260264 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-9d854"] Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.260565 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerName="dnsmasq-dns" containerID="cri-o://5d4e8c01cdc480f7b204c8eb09455d4b04756506570d3e1bde68d5d1aab9c079" gracePeriod=10 Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.683935 4634 generic.go:334] "Generic (PLEG): container finished" podID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerID="5d4e8c01cdc480f7b204c8eb09455d4b04756506570d3e1bde68d5d1aab9c079" exitCode=0 Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.684407 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" event={"ID":"d3aecf87-94be-4fdc-81b4-24eddad58770","Type":"ContainerDied","Data":"5d4e8c01cdc480f7b204c8eb09455d4b04756506570d3e1bde68d5d1aab9c079"} Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.918362 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.951010 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-config\") pod \"d3aecf87-94be-4fdc-81b4-24eddad58770\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.951064 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-swift-storage-0\") pod \"d3aecf87-94be-4fdc-81b4-24eddad58770\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.951148 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-sb\") pod \"d3aecf87-94be-4fdc-81b4-24eddad58770\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.951209 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-svc\") pod \"d3aecf87-94be-4fdc-81b4-24eddad58770\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.951233 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zvwp\" (UniqueName: \"kubernetes.io/projected/d3aecf87-94be-4fdc-81b4-24eddad58770-kube-api-access-7zvwp\") pod \"d3aecf87-94be-4fdc-81b4-24eddad58770\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.951284 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-nb\") pod \"d3aecf87-94be-4fdc-81b4-24eddad58770\" (UID: \"d3aecf87-94be-4fdc-81b4-24eddad58770\") " Sep 29 14:04:49 crc kubenswrapper[4634]: I0929 14:04:49.987526 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3aecf87-94be-4fdc-81b4-24eddad58770-kube-api-access-7zvwp" (OuterVolumeSpecName: "kube-api-access-7zvwp") pod "d3aecf87-94be-4fdc-81b4-24eddad58770" (UID: "d3aecf87-94be-4fdc-81b4-24eddad58770"). InnerVolumeSpecName "kube-api-access-7zvwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.065607 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zvwp\" (UniqueName: \"kubernetes.io/projected/d3aecf87-94be-4fdc-81b4-24eddad58770-kube-api-access-7zvwp\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.144460 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-config" (OuterVolumeSpecName: "config") pod "d3aecf87-94be-4fdc-81b4-24eddad58770" (UID: "d3aecf87-94be-4fdc-81b4-24eddad58770"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.170221 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.234412 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d3aecf87-94be-4fdc-81b4-24eddad58770" (UID: "d3aecf87-94be-4fdc-81b4-24eddad58770"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.238718 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.238973 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d3aecf87-94be-4fdc-81b4-24eddad58770" (UID: "d3aecf87-94be-4fdc-81b4-24eddad58770"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:50 crc kubenswrapper[4634]: E0929 14:04:50.239281 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerName="init" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.239307 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerName="init" Sep 29 14:04:50 crc kubenswrapper[4634]: E0929 14:04:50.239321 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerName="dnsmasq-dns" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.239330 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerName="dnsmasq-dns" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.239574 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerName="dnsmasq-dns" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.240388 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.250065 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.250538 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.250631 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-xlnk9" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.255720 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d3aecf87-94be-4fdc-81b4-24eddad58770" (UID: "d3aecf87-94be-4fdc-81b4-24eddad58770"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.273019 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.273058 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.273072 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.293296 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.329482 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d3aecf87-94be-4fdc-81b4-24eddad58770" (UID: "d3aecf87-94be-4fdc-81b4-24eddad58770"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.376228 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041479d7-0e40-4b0c-b301-f79c133394dc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.376290 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg5bp\" (UniqueName: \"kubernetes.io/projected/041479d7-0e40-4b0c-b301-f79c133394dc-kube-api-access-hg5bp\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.376371 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/041479d7-0e40-4b0c-b301-f79c133394dc-openstack-config-secret\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.376396 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/041479d7-0e40-4b0c-b301-f79c133394dc-openstack-config\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.376448 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d3aecf87-94be-4fdc-81b4-24eddad58770-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.478422 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041479d7-0e40-4b0c-b301-f79c133394dc-combined-ca-bundle\") pod 
\"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.478476 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg5bp\" (UniqueName: \"kubernetes.io/projected/041479d7-0e40-4b0c-b301-f79c133394dc-kube-api-access-hg5bp\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.478568 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/041479d7-0e40-4b0c-b301-f79c133394dc-openstack-config-secret\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.478585 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/041479d7-0e40-4b0c-b301-f79c133394dc-openstack-config\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.479618 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/041479d7-0e40-4b0c-b301-f79c133394dc-openstack-config\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.483845 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/041479d7-0e40-4b0c-b301-f79c133394dc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.494345 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/041479d7-0e40-4b0c-b301-f79c133394dc-openstack-config-secret\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.519814 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg5bp\" (UniqueName: \"kubernetes.io/projected/041479d7-0e40-4b0c-b301-f79c133394dc-kube-api-access-hg5bp\") pod \"openstackclient\" (UID: \"041479d7-0e40-4b0c-b301-f79c133394dc\") " pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.598771 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.759525 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" event={"ID":"d3aecf87-94be-4fdc-81b4-24eddad58770","Type":"ContainerDied","Data":"cdbec4c773addfee57748bc7b63285bbe9daffb43a5376b7d4630337e976ab5f"} Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.759588 4634 scope.go:117] "RemoveContainer" containerID="5d4e8c01cdc480f7b204c8eb09455d4b04756506570d3e1bde68d5d1aab9c079" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.759748 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.830507 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-9d854"] Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.851451 4634 scope.go:117] "RemoveContainer" containerID="40b3532afabbf65d06826e9b75c7a0d7e9b8b15da2d8a92cc18440c6f5cb67d2" Sep 29 14:04:50 crc kubenswrapper[4634]: I0929 14:04:50.857339 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-9d854"] Sep 29 14:04:51 crc kubenswrapper[4634]: E0929 14:04:51.007929 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3aecf87_94be_4fdc_81b4_24eddad58770.slice\": RecentStats: unable to find data in memory cache]" Sep 29 14:04:51 crc kubenswrapper[4634]: I0929 14:04:51.297036 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 14:04:51 crc kubenswrapper[4634]: I0929 14:04:51.596400 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 14:04:51 crc kubenswrapper[4634]: I0929 14:04:51.771245 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"041479d7-0e40-4b0c-b301-f79c133394dc","Type":"ContainerStarted","Data":"163ed023c544b8215296e87e6967e654a260601f49905aecf587081bdc7718d8"} Sep 29 14:04:52 crc kubenswrapper[4634]: I0929 14:04:52.154980 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" path="/var/lib/kubelet/pods/d3aecf87-94be-4fdc-81b4-24eddad58770/volumes" Sep 29 14:04:52 crc kubenswrapper[4634]: I0929 14:04:52.939621 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 14:04:52 crc kubenswrapper[4634]: I0929 14:04:52.939858 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 14:04:52 crc kubenswrapper[4634]: I0929 14:04:52.953617 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:52 crc kubenswrapper[4634]: I0929 14:04:52.953778 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 14:04:52 crc kubenswrapper[4634]: I0929 14:04:52.955650 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 14:04:53 crc kubenswrapper[4634]: I0929 14:04:53.054379 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 29 14:04:53 crc kubenswrapper[4634]: I0929 14:04:53.292373 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 14:04:53 crc kubenswrapper[4634]: I0929 14:04:53.333416 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" 
containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:04:53 crc kubenswrapper[4634]: I0929 14:04:53.876413 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b7c5bc546-zfttb" Sep 29 14:04:54 crc kubenswrapper[4634]: I0929 14:04:54.264297 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6b7c5bc546-zfttb" Sep 29 14:04:54 crc kubenswrapper[4634]: I0929 14:04:54.890774 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8b5c85b87-9d854" podUID="d3aecf87-94be-4fdc-81b4-24eddad58770" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.150:5353: i/o timeout" Sep 29 14:04:56 crc kubenswrapper[4634]: I0929 14:04:56.834785 4634 generic.go:334] "Generic (PLEG): container finished" podID="519a22f3-5513-430a-bd2b-6670ece06c2d" containerID="b85849bb737513a06fb85cdc3cdd5d8819d2622400db7de04265d258e1723405" exitCode=0 Sep 29 14:04:56 crc kubenswrapper[4634]: I0929 14:04:56.834880 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dsvdn" event={"ID":"519a22f3-5513-430a-bd2b-6670ece06c2d","Type":"ContainerDied","Data":"b85849bb737513a06fb85cdc3cdd5d8819d2622400db7de04265d258e1723405"} Sep 29 14:04:57 crc kubenswrapper[4634]: I0929 14:04:57.598202 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:57 crc kubenswrapper[4634]: I0929 14:04:57.800851 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-77c768456b-27trs" podUID="4a923c46-c064-4dbd-b91d-cc1379e39d35" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.377853 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.480724 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-77c768456b-27trs" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.573251 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b7c5bc546-zfttb"] Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.573545 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log" containerID="cri-o://04ad20588640ae84f0a4b506764f8f9f7858baefcada5c481aebc89d21c4b0fc" gracePeriod=30 Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.574218 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" containerID="cri-o://bcdb05a5274428257047e4b7630d9d63c64ed44adcdbd3938884f86db208a259" gracePeriod=30 Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.643008 4634 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": EOF" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.674758 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": EOF" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.821323 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.895482 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-db-sync-config-data\") pod \"519a22f3-5513-430a-bd2b-6670ece06c2d\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.895557 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-config-data\") pod \"519a22f3-5513-430a-bd2b-6670ece06c2d\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.895602 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-combined-ca-bundle\") pod \"519a22f3-5513-430a-bd2b-6670ece06c2d\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.895686 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5tsd\" (UniqueName: \"kubernetes.io/projected/519a22f3-5513-430a-bd2b-6670ece06c2d-kube-api-access-k5tsd\") pod \"519a22f3-5513-430a-bd2b-6670ece06c2d\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.895765 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-scripts\") pod \"519a22f3-5513-430a-bd2b-6670ece06c2d\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.895803 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/519a22f3-5513-430a-bd2b-6670ece06c2d-etc-machine-id\") pod \"519a22f3-5513-430a-bd2b-6670ece06c2d\" (UID: \"519a22f3-5513-430a-bd2b-6670ece06c2d\") " Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.896285 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/519a22f3-5513-430a-bd2b-6670ece06c2d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "519a22f3-5513-430a-bd2b-6670ece06c2d" (UID: "519a22f3-5513-430a-bd2b-6670ece06c2d"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.910920 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/519a22f3-5513-430a-bd2b-6670ece06c2d-kube-api-access-k5tsd" (OuterVolumeSpecName: "kube-api-access-k5tsd") pod "519a22f3-5513-430a-bd2b-6670ece06c2d" (UID: "519a22f3-5513-430a-bd2b-6670ece06c2d"). InnerVolumeSpecName "kube-api-access-k5tsd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.911225 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-scripts" (OuterVolumeSpecName: "scripts") pod "519a22f3-5513-430a-bd2b-6670ece06c2d" (UID: "519a22f3-5513-430a-bd2b-6670ece06c2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.913757 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-dsvdn" event={"ID":"519a22f3-5513-430a-bd2b-6670ece06c2d","Type":"ContainerDied","Data":"f0dc39cdab7b1e61438fdefeea0a30426f80ec92fadbf57bdf574d46ab53b609"} Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.913801 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0dc39cdab7b1e61438fdefeea0a30426f80ec92fadbf57bdf574d46ab53b609" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.913878 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-dsvdn" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.918212 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "519a22f3-5513-430a-bd2b-6670ece06c2d" (UID: "519a22f3-5513-430a-bd2b-6670ece06c2d"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.926430 4634 generic.go:334] "Generic (PLEG): container finished" podID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerID="04ad20588640ae84f0a4b506764f8f9f7858baefcada5c481aebc89d21c4b0fc" exitCode=143 Sep 29 14:04:58 crc kubenswrapper[4634]: I0929 14:04:58.926481 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b7c5bc546-zfttb" event={"ID":"845a9190-05ae-4893-a7e3-d4bcaa7d8d53","Type":"ContainerDied","Data":"04ad20588640ae84f0a4b506764f8f9f7858baefcada5c481aebc89d21c4b0fc"} Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.001360 4634 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/519a22f3-5513-430a-bd2b-6670ece06c2d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.001397 4634 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.001407 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5tsd\" (UniqueName: \"kubernetes.io/projected/519a22f3-5513-430a-bd2b-6670ece06c2d-kube-api-access-k5tsd\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.001419 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.017850 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "519a22f3-5513-430a-bd2b-6670ece06c2d" (UID: "519a22f3-5513-430a-bd2b-6670ece06c2d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.046968 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-config-data" (OuterVolumeSpecName: "config-data") pod "519a22f3-5513-430a-bd2b-6670ece06c2d" (UID: "519a22f3-5513-430a-bd2b-6670ece06c2d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.103490 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.103534 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/519a22f3-5513-430a-bd2b-6670ece06c2d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.203447 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 14:04:59 crc kubenswrapper[4634]: E0929 14:04:59.204222 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519a22f3-5513-430a-bd2b-6670ece06c2d" containerName="cinder-db-sync" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.204238 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="519a22f3-5513-430a-bd2b-6670ece06c2d" containerName="cinder-db-sync" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.204460 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="519a22f3-5513-430a-bd2b-6670ece06c2d" containerName="cinder-db-sync" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.206819 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.210637 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.323148 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-scripts\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.323749 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.324350 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.324380 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ab9885-b2c0-483d-901a-9f850dffd96f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.324617 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.324721 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtzfs\" (UniqueName: \"kubernetes.io/projected/40ab9885-b2c0-483d-901a-9f850dffd96f-kube-api-access-vtzfs\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.346396 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.390765 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-smwzr"] Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.398249 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.422549 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-smwzr"] Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.444262 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtzfs\" (UniqueName: \"kubernetes.io/projected/40ab9885-b2c0-483d-901a-9f850dffd96f-kube-api-access-vtzfs\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.444326 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-scripts\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.444388 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.444454 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.444470 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ab9885-b2c0-483d-901a-9f850dffd96f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.444498 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.449547 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/40ab9885-b2c0-483d-901a-9f850dffd96f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.459750 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.461022 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-scripts\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.468926 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.488061 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.508791 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtzfs\" (UniqueName: \"kubernetes.io/projected/40ab9885-b2c0-483d-901a-9f850dffd96f-kube-api-access-vtzfs\") pod \"cinder-scheduler-0\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.527783 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.529442 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.535758 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.549277 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.555768 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.555818 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-965sj\" (UniqueName: \"kubernetes.io/projected/efd571a2-8f7f-4962-ae08-c415dbaab95e-kube-api-access-965sj\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.555871 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.556160 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-config\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.556200 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.556251 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-svc\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.564565 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659056 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22eacd73-72e4-43db-8c89-6a380b51a08e-logs\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659116 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data-custom\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659146 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659165 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22eacd73-72e4-43db-8c89-6a380b51a08e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659193 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-config\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659225 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659252 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-svc\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659297 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659324 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-scripts\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659378 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659396 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-965sj\" (UniqueName: \"kubernetes.io/projected/efd571a2-8f7f-4962-ae08-c415dbaab95e-kube-api-access-965sj\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659420 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-smwzr\" 
(UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.659449 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2mh7\" (UniqueName: \"kubernetes.io/projected/22eacd73-72e4-43db-8c89-6a380b51a08e-kube-api-access-q2mh7\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.661960 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.662502 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-svc\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.663141 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.663532 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.667059 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-config\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.704975 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-965sj\" (UniqueName: \"kubernetes.io/projected/efd571a2-8f7f-4962-ae08-c415dbaab95e-kube-api-access-965sj\") pod \"dnsmasq-dns-5784cf869f-smwzr\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.736557 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.764540 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2mh7\" (UniqueName: \"kubernetes.io/projected/22eacd73-72e4-43db-8c89-6a380b51a08e-kube-api-access-q2mh7\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.764614 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22eacd73-72e4-43db-8c89-6a380b51a08e-logs\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.764638 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data-custom\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.764676 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.764695 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22eacd73-72e4-43db-8c89-6a380b51a08e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.764757 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.764783 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-scripts\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.767840 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22eacd73-72e4-43db-8c89-6a380b51a08e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.776836 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22eacd73-72e4-43db-8c89-6a380b51a08e-logs\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.782884 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " 
pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.785650 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data-custom\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.788819 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.795969 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-scripts\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.812022 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2mh7\" (UniqueName: \"kubernetes.io/projected/22eacd73-72e4-43db-8c89-6a380b51a08e-kube-api-access-q2mh7\") pod \"cinder-api-0\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") " pod="openstack/cinder-api-0" Sep 29 14:04:59 crc kubenswrapper[4634]: I0929 14:04:59.864742 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 14:05:00 crc kubenswrapper[4634]: I0929 14:05:00.626819 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 14:05:00 crc kubenswrapper[4634]: I0929 14:05:00.790093 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-smwzr"] Sep 29 14:05:00 crc kubenswrapper[4634]: I0929 14:05:00.998022 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ab9885-b2c0-483d-901a-9f850dffd96f","Type":"ContainerStarted","Data":"f48aee11d689818f9aa513175c51b734584866d9545e7c095fc8ff5f4d00d50e"} Sep 29 14:05:01 crc kubenswrapper[4634]: I0929 14:05:01.002045 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 14:05:01 crc kubenswrapper[4634]: I0929 14:05:01.007286 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" event={"ID":"efd571a2-8f7f-4962-ae08-c415dbaab95e","Type":"ContainerStarted","Data":"fe564f054c40a0bd4313c18f1838786fab4589a30b91e4baeceb2a84eba9049d"} Sep 29 14:05:01 crc kubenswrapper[4634]: I0929 14:05:01.605206 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 14:05:01 crc kubenswrapper[4634]: I0929 14:05:01.605726 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:05:02 crc kubenswrapper[4634]: I0929 14:05:02.049751 4634 generic.go:334] "Generic (PLEG): container finished" podID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerID="fd07837d26e813a34d0297e23190b955a3e4b07ed08d8e9434a2ac26ca3ee485" exitCode=0 Sep 29 14:05:02 crc kubenswrapper[4634]: I0929 14:05:02.049907 
4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" event={"ID":"efd571a2-8f7f-4962-ae08-c415dbaab95e","Type":"ContainerDied","Data":"fd07837d26e813a34d0297e23190b955a3e4b07ed08d8e9434a2ac26ca3ee485"} Sep 29 14:05:02 crc kubenswrapper[4634]: I0929 14:05:02.069426 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22eacd73-72e4-43db-8c89-6a380b51a08e","Type":"ContainerStarted","Data":"73bdb45fd062054a9f04019c1561a29f2cf1506437b9e9dba0ed387974b13e11"} Sep 29 14:05:02 crc kubenswrapper[4634]: I0929 14:05:02.083562 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-77c768456b-27trs" podUID="4a923c46-c064-4dbd-b91d-cc1379e39d35" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:05:02 crc kubenswrapper[4634]: I0929 14:05:02.806376 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-77c768456b-27trs" podUID="4a923c46-c064-4dbd-b91d-cc1379e39d35" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.115827 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" event={"ID":"efd571a2-8f7f-4962-ae08-c415dbaab95e","Type":"ContainerStarted","Data":"485059c2feadad66c844ccb92b6d4ae98c977bb5fb6f94fd4900b817d764006b"} Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.116281 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.150022 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22eacd73-72e4-43db-8c89-6a380b51a08e","Type":"ContainerStarted","Data":"6e0b5c89c3a5dd3a5b1a943b111b5fa740c956fc698e2f48d3abbb617e765949"} Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.265023 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" podStartSLOduration=4.265002023 podStartE2EDuration="4.265002023s" podCreationTimestamp="2025-09-29 14:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:03.187353456 +0000 UTC m=+1233.756081205" watchObservedRunningTime="2025-09-29 14:05:03.265002023 +0000 UTC m=+1233.833729772" Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.278235 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.485702 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-77c768456b-27trs" podUID="4a923c46-c064-4dbd-b91d-cc1379e39d35" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.716385 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" probeResult="failure" output="Get 
\"http://10.217.0.163:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:05:03 crc kubenswrapper[4634]: I0929 14:05:03.782304 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-77c768456b-27trs" podUID="4a923c46-c064-4dbd-b91d-cc1379e39d35" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.164:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:05:04 crc kubenswrapper[4634]: I0929 14:05:04.195526 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22eacd73-72e4-43db-8c89-6a380b51a08e","Type":"ContainerStarted","Data":"45005b4b189951ad9dbb4e11d7957914c56cddd75261b7d2f7cf763bbc1a150d"} Sep 29 14:05:04 crc kubenswrapper[4634]: I0929 14:05:04.195577 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api-log" containerID="cri-o://6e0b5c89c3a5dd3a5b1a943b111b5fa740c956fc698e2f48d3abbb617e765949" gracePeriod=30 Sep 29 14:05:04 crc kubenswrapper[4634]: I0929 14:05:04.195986 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api" containerID="cri-o://45005b4b189951ad9dbb4e11d7957914c56cddd75261b7d2f7cf763bbc1a150d" gracePeriod=30 Sep 29 14:05:04 crc kubenswrapper[4634]: I0929 14:05:04.196047 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 14:05:04 crc kubenswrapper[4634]: I0929 14:05:04.199731 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ab9885-b2c0-483d-901a-9f850dffd96f","Type":"ContainerStarted","Data":"904a6af8671f5422d015207c4050d71ec1cbe3ade06c9cb0228e144977fc0a90"} Sep 29 14:05:04 crc kubenswrapper[4634]: I0929 14:05:04.225134 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.225115785 podStartE2EDuration="5.225115785s" podCreationTimestamp="2025-09-29 14:04:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:04.219780116 +0000 UTC m=+1234.788507855" watchObservedRunningTime="2025-09-29 14:05:04.225115785 +0000 UTC m=+1234.793843524" Sep 29 14:05:04 crc kubenswrapper[4634]: I0929 14:05:04.332302 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:05:05 crc kubenswrapper[4634]: I0929 14:05:05.220587 4634 generic.go:334] "Generic (PLEG): container finished" podID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerID="6e0b5c89c3a5dd3a5b1a943b111b5fa740c956fc698e2f48d3abbb617e765949" exitCode=143 Sep 29 14:05:05 crc kubenswrapper[4634]: I0929 14:05:05.220781 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22eacd73-72e4-43db-8c89-6a380b51a08e","Type":"ContainerDied","Data":"6e0b5c89c3a5dd3a5b1a943b111b5fa740c956fc698e2f48d3abbb617e765949"} Sep 29 14:05:05 crc kubenswrapper[4634]: I0929 14:05:05.223110 4634 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ab9885-b2c0-483d-901a-9f850dffd96f","Type":"ContainerStarted","Data":"c1a185d440360438a56d988ecef931b8e483a3d534199678574eabb4afc3c501"} Sep 29 14:05:05 crc kubenswrapper[4634]: I0929 14:05:05.899231 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-54f5455b48-gwkmt" Sep 29 14:05:05 crc kubenswrapper[4634]: I0929 14:05:05.927585 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.660909556 podStartE2EDuration="6.927559324s" podCreationTimestamp="2025-09-29 14:04:59 +0000 UTC" firstStartedPulling="2025-09-29 14:05:00.696316449 +0000 UTC m=+1231.265044198" lastFinishedPulling="2025-09-29 14:05:01.962966217 +0000 UTC m=+1232.531693966" observedRunningTime="2025-09-29 14:05:05.248252288 +0000 UTC m=+1235.816980037" watchObservedRunningTime="2025-09-29 14:05:05.927559324 +0000 UTC m=+1236.496287073" Sep 29 14:05:06 crc kubenswrapper[4634]: I0929 14:05:06.205879 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:55164->10.217.0.163:9311: read: connection reset by peer" Sep 29 14:05:06 crc kubenswrapper[4634]: I0929 14:05:06.206226 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": read tcp 10.217.0.2:55150->10.217.0.163:9311: read: connection reset by peer" Sep 29 14:05:06 crc kubenswrapper[4634]: I0929 14:05:06.206510 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b7c5bc546-zfttb" Sep 29 14:05:06 crc kubenswrapper[4634]: I0929 14:05:06.206821 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Sep 29 14:05:06 crc kubenswrapper[4634]: I0929 14:05:06.998733 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-847d5655ff-zzkf2"] Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.000814 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.003671 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.003890 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.004380 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.034801 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-847d5655ff-zzkf2"] Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.083896 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9f537d-9c82-4675-aeaf-c0e4656a1330-run-httpd\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.083962 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9f537d-9c82-4675-aeaf-c0e4656a1330-log-httpd\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.084048 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-config-data\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.084111 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-public-tls-certs\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.084155 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-combined-ca-bundle\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.084193 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/eb9f537d-9c82-4675-aeaf-c0e4656a1330-etc-swift\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.084333 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-internal-tls-certs\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " 
pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.084357 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd78c\" (UniqueName: \"kubernetes.io/projected/eb9f537d-9c82-4675-aeaf-c0e4656a1330-kube-api-access-cd78c\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.191637 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-config-data\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.191757 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-public-tls-certs\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.191877 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-combined-ca-bundle\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.191929 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/eb9f537d-9c82-4675-aeaf-c0e4656a1330-etc-swift\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.192062 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-internal-tls-certs\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.192103 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd78c\" (UniqueName: \"kubernetes.io/projected/eb9f537d-9c82-4675-aeaf-c0e4656a1330-kube-api-access-cd78c\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.192180 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9f537d-9c82-4675-aeaf-c0e4656a1330-run-httpd\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.192306 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9f537d-9c82-4675-aeaf-c0e4656a1330-log-httpd\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " 
pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.195829 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9f537d-9c82-4675-aeaf-c0e4656a1330-run-httpd\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.195884 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb9f537d-9c82-4675-aeaf-c0e4656a1330-log-httpd\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.200693 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-config-data\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.202333 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-public-tls-certs\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.203674 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/eb9f537d-9c82-4675-aeaf-c0e4656a1330-etc-swift\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.204002 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-combined-ca-bundle\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.205769 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb9f537d-9c82-4675-aeaf-c0e4656a1330-internal-tls-certs\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.217302 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd78c\" (UniqueName: \"kubernetes.io/projected/eb9f537d-9c82-4675-aeaf-c0e4656a1330-kube-api-access-cd78c\") pod \"swift-proxy-847d5655ff-zzkf2\" (UID: \"eb9f537d-9c82-4675-aeaf-c0e4656a1330\") " pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.259407 4634 generic.go:334] "Generic (PLEG): container finished" podID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerID="bcdb05a5274428257047e4b7630d9d63c64ed44adcdbd3938884f86db208a259" exitCode=0 Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.259459 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b7c5bc546-zfttb" 
event={"ID":"845a9190-05ae-4893-a7e3-d4bcaa7d8d53","Type":"ContainerDied","Data":"bcdb05a5274428257047e4b7630d9d63c64ed44adcdbd3938884f86db208a259"} Sep 29 14:05:07 crc kubenswrapper[4634]: I0929 14:05:07.373998 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.013977 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7f758ffcf7-qsxtz" Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.138677 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-54f5455b48-gwkmt"] Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.138896 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-54f5455b48-gwkmt" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-api" containerID="cri-o://234b74c8bccfa87df5a4ee49d1a3189478c634ddd98b00ae4dd119a84e8b2908" gracePeriod=30 Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.140123 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-54f5455b48-gwkmt" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-httpd" containerID="cri-o://c99e5c0faef3b7c8c5ecbf0919ee165ee2274be3982d3263030eb303399e6d9d" gracePeriod=30 Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.283266 4634 generic.go:334] "Generic (PLEG): container finished" podID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerID="f3a0d79b872cc4c22a86229d575f404459460f94214c1109ef0ffee698ef1de4" exitCode=137 Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.283340 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"061c94cb-cc6c-4a14-a0c4-4bcef38173b7","Type":"ContainerDied","Data":"f3a0d79b872cc4c22a86229d575f404459460f94214c1109ef0ffee698ef1de4"} Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.293079 4634 generic.go:334] "Generic (PLEG): container finished" podID="1427174f-e673-4c3e-bf36-f1463327fd61" containerID="7dbc773a851935823ae057d5f45287b8cb997b84d3d50b9ecf58704e2fd62dd0" exitCode=137 Sep 29 14:05:08 crc kubenswrapper[4634]: I0929 14:05:08.293190 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bbc59f76-j4rjv" event={"ID":"1427174f-e673-4c3e-bf36-f1463327fd61","Type":"ContainerDied","Data":"7dbc773a851935823ae057d5f45287b8cb997b84d3d50b9ecf58704e2fd62dd0"} Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.289921 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.290404 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6b7c5bc546-zfttb" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.292585 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.307516 4634 generic.go:334] "Generic (PLEG): container finished" 
podID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerID="c99e5c0faef3b7c8c5ecbf0919ee165ee2274be3982d3263030eb303399e6d9d" exitCode=0 Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.307567 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54f5455b48-gwkmt" event={"ID":"2d7c0763-bf32-4ecb-b1bd-b989e492e943","Type":"ContainerDied","Data":"c99e5c0faef3b7c8c5ecbf0919ee165ee2274be3982d3263030eb303399e6d9d"} Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.551028 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.658744 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-lbqsf"] Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.660255 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lbqsf" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.672279 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lbqsf"] Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.745585 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.752966 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-jps78"] Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.754481 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jps78" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.794735 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jps78"] Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.866482 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jddgc"] Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.867099 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" podUID="d4baf450-6210-4b45-9c46-810b190b1741" containerName="dnsmasq-dns" containerID="cri-o://f5b0a49a7b4b03ac4db641c9c08dac48d13440ce16581479ce0d0f1f8e241aab" gracePeriod=10 Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.871819 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcqwf\" (UniqueName: \"kubernetes.io/projected/e673e0f3-02fe-40fb-8fc5-8ab681f26068-kube-api-access-gcqwf\") pod \"nova-api-db-create-lbqsf\" (UID: \"e673e0f3-02fe-40fb-8fc5-8ab681f26068\") " pod="openstack/nova-api-db-create-lbqsf" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.872272 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsgtp\" (UniqueName: \"kubernetes.io/projected/e2280fa9-785d-467b-a50c-010d55023f64-kube-api-access-hsgtp\") pod \"nova-cell0-db-create-jps78\" (UID: \"e2280fa9-785d-467b-a50c-010d55023f64\") " pod="openstack/nova-cell0-db-create-jps78" Sep 29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.979492 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcqwf\" (UniqueName: \"kubernetes.io/projected/e673e0f3-02fe-40fb-8fc5-8ab681f26068-kube-api-access-gcqwf\") pod \"nova-api-db-create-lbqsf\" (UID: \"e673e0f3-02fe-40fb-8fc5-8ab681f26068\") " pod="openstack/nova-api-db-create-lbqsf" Sep 
29 14:05:09 crc kubenswrapper[4634]: I0929 14:05:09.979909 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsgtp\" (UniqueName: \"kubernetes.io/projected/e2280fa9-785d-467b-a50c-010d55023f64-kube-api-access-hsgtp\") pod \"nova-cell0-db-create-jps78\" (UID: \"e2280fa9-785d-467b-a50c-010d55023f64\") " pod="openstack/nova-cell0-db-create-jps78" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.007219 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsgtp\" (UniqueName: \"kubernetes.io/projected/e2280fa9-785d-467b-a50c-010d55023f64-kube-api-access-hsgtp\") pod \"nova-cell0-db-create-jps78\" (UID: \"e2280fa9-785d-467b-a50c-010d55023f64\") " pod="openstack/nova-cell0-db-create-jps78" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.016658 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcqwf\" (UniqueName: \"kubernetes.io/projected/e673e0f3-02fe-40fb-8fc5-8ab681f26068-kube-api-access-gcqwf\") pod \"nova-api-db-create-lbqsf\" (UID: \"e673e0f3-02fe-40fb-8fc5-8ab681f26068\") " pod="openstack/nova-api-db-create-lbqsf" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.017645 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lbqsf" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.058183 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-jnxdv"] Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.069596 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jnxdv" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.081596 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5qpf\" (UniqueName: \"kubernetes.io/projected/eab330ba-5ee5-43b8-b7a5-8c229c3447c3-kube-api-access-r5qpf\") pod \"nova-cell1-db-create-jnxdv\" (UID: \"eab330ba-5ee5-43b8-b7a5-8c229c3447c3\") " pod="openstack/nova-cell1-db-create-jnxdv" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.119688 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-jps78" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.172320 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-jnxdv"] Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.184201 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5qpf\" (UniqueName: \"kubernetes.io/projected/eab330ba-5ee5-43b8-b7a5-8c229c3447c3-kube-api-access-r5qpf\") pod \"nova-cell1-db-create-jnxdv\" (UID: \"eab330ba-5ee5-43b8-b7a5-8c229c3447c3\") " pod="openstack/nova-cell1-db-create-jnxdv" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.213520 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5qpf\" (UniqueName: \"kubernetes.io/projected/eab330ba-5ee5-43b8-b7a5-8c229c3447c3-kube-api-access-r5qpf\") pod \"nova-cell1-db-create-jnxdv\" (UID: \"eab330ba-5ee5-43b8-b7a5-8c229c3447c3\") " pod="openstack/nova-cell1-db-create-jnxdv" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.302898 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.337366 4634 generic.go:334] "Generic (PLEG): container finished" podID="d4baf450-6210-4b45-9c46-810b190b1741" containerID="f5b0a49a7b4b03ac4db641c9c08dac48d13440ce16581479ce0d0f1f8e241aab" exitCode=0 Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.338595 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" event={"ID":"d4baf450-6210-4b45-9c46-810b190b1741","Type":"ContainerDied","Data":"f5b0a49a7b4b03ac4db641c9c08dac48d13440ce16581479ce0d0f1f8e241aab"} Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.383772 4634 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod75805ca7-b7e1-4c1a-a5ae-dae7c79b8b40] : Timed out while waiting for systemd to remove kubepods-besteffort-pod75805ca7_b7e1_4c1a_a5ae_dae7c79b8b40.slice" Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.387388 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 14:05:10 crc kubenswrapper[4634]: I0929 14:05:10.465585 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-jnxdv" Sep 29 14:05:11 crc kubenswrapper[4634]: I0929 14:05:11.327569 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:05:11 crc kubenswrapper[4634]: I0929 14:05:11.350664 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="cinder-scheduler" containerID="cri-o://904a6af8671f5422d015207c4050d71ec1cbe3ade06c9cb0228e144977fc0a90" gracePeriod=30 Sep 29 14:05:11 crc kubenswrapper[4634]: I0929 14:05:11.351379 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="probe" containerID="cri-o://c1a185d440360438a56d988ecef931b8e483a3d534199678574eabb4afc3c501" gracePeriod=30 Sep 29 14:05:11 crc kubenswrapper[4634]: I0929 14:05:11.419703 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5bfb7db698-tmn8x" Sep 29 14:05:11 crc kubenswrapper[4634]: I0929 14:05:11.596326 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7bbc59f76-j4rjv" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 14:05:12 crc kubenswrapper[4634]: I0929 14:05:12.909308 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 14:05:13 crc kubenswrapper[4634]: I0929 14:05:13.378606 4634 generic.go:334] "Generic (PLEG): container finished" podID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerID="c1a185d440360438a56d988ecef931b8e483a3d534199678574eabb4afc3c501" exitCode=0 Sep 29 14:05:13 crc kubenswrapper[4634]: I0929 14:05:13.378665 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ab9885-b2c0-483d-901a-9f850dffd96f","Type":"ContainerDied","Data":"c1a185d440360438a56d988ecef931b8e483a3d534199678574eabb4afc3c501"} Sep 29 14:05:13 crc kubenswrapper[4634]: I0929 14:05:13.381561 4634 generic.go:334] "Generic (PLEG): container finished" podID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerID="234b74c8bccfa87df5a4ee49d1a3189478c634ddd98b00ae4dd119a84e8b2908" exitCode=0 Sep 29 14:05:13 crc kubenswrapper[4634]: I0929 14:05:13.381595 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54f5455b48-gwkmt" event={"ID":"2d7c0763-bf32-4ecb-b1bd-b989e492e943","Type":"ContainerDied","Data":"234b74c8bccfa87df5a4ee49d1a3189478c634ddd98b00ae4dd119a84e8b2908"} Sep 29 14:05:14 crc kubenswrapper[4634]: I0929 14:05:14.199539 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" podUID="d4baf450-6210-4b45-9c46-810b190b1741" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.162:5353: connect: connection refused" Sep 29 14:05:14 crc kubenswrapper[4634]: I0929 14:05:14.289717 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Sep 29 14:05:14 crc kubenswrapper[4634]: I0929 
14:05:14.293159 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6b7c5bc546-zfttb" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.163:9311/healthcheck\": dial tcp 10.217.0.163:9311: connect: connection refused" Sep 29 14:05:14 crc kubenswrapper[4634]: I0929 14:05:14.394324 4634 generic.go:334] "Generic (PLEG): container finished" podID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerID="904a6af8671f5422d015207c4050d71ec1cbe3ade06c9cb0228e144977fc0a90" exitCode=0 Sep 29 14:05:14 crc kubenswrapper[4634]: I0929 14:05:14.394375 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ab9885-b2c0-483d-901a-9f850dffd96f","Type":"ContainerDied","Data":"904a6af8671f5422d015207c4050d71ec1cbe3ade06c9cb0228e144977fc0a90"} Sep 29 14:05:15 crc kubenswrapper[4634]: E0929 14:05:15.860839 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Sep 29 14:05:15 crc kubenswrapper[4634]: E0929 14:05:15.862181 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n569h9dh5dbh596h5f9hc7h556h575h64hcbh546hfbh76h5c9h65dh559h668hffh548h57h568h547h669h5fch9ch697h657h687hbfhd8h88h5f6q,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hg5bp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
openstackclient_openstack(041479d7-0e40-4b0c-b301-f79c133394dc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:05:15 crc kubenswrapper[4634]: E0929 14:05:15.864169 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="041479d7-0e40-4b0c-b301-f79c133394dc" Sep 29 14:05:16 crc kubenswrapper[4634]: E0929 14:05:16.422413 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="041479d7-0e40-4b0c-b301-f79c133394dc" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.646638 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bbc59f76-j4rjv" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.668925 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.674243 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773232 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-secret-key\") pod \"1427174f-e673-4c3e-bf36-f1463327fd61\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773299 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data-custom\") pod \"40ab9885-b2c0-483d-901a-9f850dffd96f\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773326 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-sg-core-conf-yaml\") pod \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773373 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data\") pod \"40ab9885-b2c0-483d-901a-9f850dffd96f\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773406 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-config-data\") pod \"1427174f-e673-4c3e-bf36-f1463327fd61\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773484 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-combined-ca-bundle\") pod \"40ab9885-b2c0-483d-901a-9f850dffd96f\" (UID: 
\"40ab9885-b2c0-483d-901a-9f850dffd96f\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773521 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-tls-certs\") pod \"1427174f-e673-4c3e-bf36-f1463327fd61\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773555 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-scripts\") pod \"1427174f-e673-4c3e-bf36-f1463327fd61\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773600 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-scripts\") pod \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773629 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nvgb\" (UniqueName: \"kubernetes.io/projected/1427174f-e673-4c3e-bf36-f1463327fd61-kube-api-access-8nvgb\") pod \"1427174f-e673-4c3e-bf36-f1463327fd61\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773666 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-combined-ca-bundle\") pod \"1427174f-e673-4c3e-bf36-f1463327fd61\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773721 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csfmr\" (UniqueName: \"kubernetes.io/projected/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-kube-api-access-csfmr\") pod \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773757 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-combined-ca-bundle\") pod \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773825 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1427174f-e673-4c3e-bf36-f1463327fd61-logs\") pod \"1427174f-e673-4c3e-bf36-f1463327fd61\" (UID: \"1427174f-e673-4c3e-bf36-f1463327fd61\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773857 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-scripts\") pod \"40ab9885-b2c0-483d-901a-9f850dffd96f\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773876 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ab9885-b2c0-483d-901a-9f850dffd96f-etc-machine-id\") pod \"40ab9885-b2c0-483d-901a-9f850dffd96f\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") 
" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.773937 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-config-data\") pod \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.774005 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtzfs\" (UniqueName: \"kubernetes.io/projected/40ab9885-b2c0-483d-901a-9f850dffd96f-kube-api-access-vtzfs\") pod \"40ab9885-b2c0-483d-901a-9f850dffd96f\" (UID: \"40ab9885-b2c0-483d-901a-9f850dffd96f\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.774071 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-log-httpd\") pod \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.774117 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-run-httpd\") pod \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\" (UID: \"061c94cb-cc6c-4a14-a0c4-4bcef38173b7\") " Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.775487 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "061c94cb-cc6c-4a14-a0c4-4bcef38173b7" (UID: "061c94cb-cc6c-4a14-a0c4-4bcef38173b7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.783206 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40ab9885-b2c0-483d-901a-9f850dffd96f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "40ab9885-b2c0-483d-901a-9f850dffd96f" (UID: "40ab9885-b2c0-483d-901a-9f850dffd96f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.799613 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1427174f-e673-4c3e-bf36-f1463327fd61-logs" (OuterVolumeSpecName: "logs") pod "1427174f-e673-4c3e-bf36-f1463327fd61" (UID: "1427174f-e673-4c3e-bf36-f1463327fd61"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.817259 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "40ab9885-b2c0-483d-901a-9f850dffd96f" (UID: "40ab9885-b2c0-483d-901a-9f850dffd96f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.817652 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "061c94cb-cc6c-4a14-a0c4-4bcef38173b7" (UID: "061c94cb-cc6c-4a14-a0c4-4bcef38173b7"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.823826 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "061c94cb-cc6c-4a14-a0c4-4bcef38173b7" (UID: "061c94cb-cc6c-4a14-a0c4-4bcef38173b7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.824383 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "1427174f-e673-4c3e-bf36-f1463327fd61" (UID: "1427174f-e673-4c3e-bf36-f1463327fd61"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.845110 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1427174f-e673-4c3e-bf36-f1463327fd61-kube-api-access-8nvgb" (OuterVolumeSpecName: "kube-api-access-8nvgb") pod "1427174f-e673-4c3e-bf36-f1463327fd61" (UID: "1427174f-e673-4c3e-bf36-f1463327fd61"). InnerVolumeSpecName "kube-api-access-8nvgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.846434 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40ab9885-b2c0-483d-901a-9f850dffd96f-kube-api-access-vtzfs" (OuterVolumeSpecName: "kube-api-access-vtzfs") pod "40ab9885-b2c0-483d-901a-9f850dffd96f" (UID: "40ab9885-b2c0-483d-901a-9f850dffd96f"). InnerVolumeSpecName "kube-api-access-vtzfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.855435 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-kube-api-access-csfmr" (OuterVolumeSpecName: "kube-api-access-csfmr") pod "061c94cb-cc6c-4a14-a0c4-4bcef38173b7" (UID: "061c94cb-cc6c-4a14-a0c4-4bcef38173b7"). InnerVolumeSpecName "kube-api-access-csfmr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.890763 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.891713 4634 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.891787 4634 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.891855 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.891915 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nvgb\" (UniqueName: \"kubernetes.io/projected/1427174f-e673-4c3e-bf36-f1463327fd61-kube-api-access-8nvgb\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.891970 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csfmr\" (UniqueName: \"kubernetes.io/projected/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-kube-api-access-csfmr\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.892040 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1427174f-e673-4c3e-bf36-f1463327fd61-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.892121 4634 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/40ab9885-b2c0-483d-901a-9f850dffd96f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.892179 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtzfs\" (UniqueName: \"kubernetes.io/projected/40ab9885-b2c0-483d-901a-9f850dffd96f-kube-api-access-vtzfs\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.892254 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.912285 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-scripts" (OuterVolumeSpecName: "scripts") pod "40ab9885-b2c0-483d-901a-9f850dffd96f" (UID: "40ab9885-b2c0-483d-901a-9f850dffd96f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.919350 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-scripts" (OuterVolumeSpecName: "scripts") pod "061c94cb-cc6c-4a14-a0c4-4bcef38173b7" (UID: "061c94cb-cc6c-4a14-a0c4-4bcef38173b7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.947933 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-config-data" (OuterVolumeSpecName: "config-data") pod "1427174f-e673-4c3e-bf36-f1463327fd61" (UID: "1427174f-e673-4c3e-bf36-f1463327fd61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:05:16 crc kubenswrapper[4634]: I0929 14:05:16.982199 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1427174f-e673-4c3e-bf36-f1463327fd61" (UID: "1427174f-e673-4c3e-bf36-f1463327fd61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:16.994665 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:16.994706 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:16.994721 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:16.994730 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.011774 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-scripts" (OuterVolumeSpecName: "scripts") pod "1427174f-e673-4c3e-bf36-f1463327fd61" (UID: "1427174f-e673-4c3e-bf36-f1463327fd61"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.072855 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40ab9885-b2c0-483d-901a-9f850dffd96f" (UID: "40ab9885-b2c0-483d-901a-9f850dffd96f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.102499 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.102539 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1427174f-e673-4c3e-bf36-f1463327fd61-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.139070 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "1427174f-e673-4c3e-bf36-f1463327fd61" (UID: "1427174f-e673-4c3e-bf36-f1463327fd61"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.169344 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "061c94cb-cc6c-4a14-a0c4-4bcef38173b7" (UID: "061c94cb-cc6c-4a14-a0c4-4bcef38173b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.180793 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.203748 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-sb\") pod \"d4baf450-6210-4b45-9c46-810b190b1741\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.203843 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-svc\") pod \"d4baf450-6210-4b45-9c46-810b190b1741\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.203864 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-swift-storage-0\") pod \"d4baf450-6210-4b45-9c46-810b190b1741\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.203886 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84cgv\" (UniqueName: \"kubernetes.io/projected/d4baf450-6210-4b45-9c46-810b190b1741-kube-api-access-84cgv\") pod \"d4baf450-6210-4b45-9c46-810b190b1741\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.203969 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-nb\") pod \"d4baf450-6210-4b45-9c46-810b190b1741\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.204005 4634 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-config\") pod \"d4baf450-6210-4b45-9c46-810b190b1741\" (UID: \"d4baf450-6210-4b45-9c46-810b190b1741\") " Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.247341 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data" (OuterVolumeSpecName: "config-data") pod "40ab9885-b2c0-483d-901a-9f850dffd96f" (UID: "40ab9885-b2c0-483d-901a-9f850dffd96f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.251190 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-config-data" (OuterVolumeSpecName: "config-data") pod "061c94cb-cc6c-4a14-a0c4-4bcef38173b7" (UID: "061c94cb-cc6c-4a14-a0c4-4bcef38173b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.282385 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4baf450-6210-4b45-9c46-810b190b1741-kube-api-access-84cgv" (OuterVolumeSpecName: "kube-api-access-84cgv") pod "d4baf450-6210-4b45-9c46-810b190b1741" (UID: "d4baf450-6210-4b45-9c46-810b190b1741"). InnerVolumeSpecName "kube-api-access-84cgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.294010 4634 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/1427174f-e673-4c3e-bf36-f1463327fd61-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.294103 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.395185 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d4baf450-6210-4b45-9c46-810b190b1741" (UID: "d4baf450-6210-4b45-9c46-810b190b1741"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.395837 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-config" (OuterVolumeSpecName: "config") pod "d4baf450-6210-4b45-9c46-810b190b1741" (UID: "d4baf450-6210-4b45-9c46-810b190b1741"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.402212 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d4baf450-6210-4b45-9c46-810b190b1741" (UID: "d4baf450-6210-4b45-9c46-810b190b1741"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.402648 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.402664 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.402677 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84cgv\" (UniqueName: \"kubernetes.io/projected/d4baf450-6210-4b45-9c46-810b190b1741-kube-api-access-84cgv\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.402688 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40ab9885-b2c0-483d-901a-9f850dffd96f-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.402699 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-config\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.402708 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/061c94cb-cc6c-4a14-a0c4-4bcef38173b7-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.432296 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d4baf450-6210-4b45-9c46-810b190b1741" (UID: "d4baf450-6210-4b45-9c46-810b190b1741"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.434786 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d4baf450-6210-4b45-9c46-810b190b1741" (UID: "d4baf450-6210-4b45-9c46-810b190b1741"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.465307 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"061c94cb-cc6c-4a14-a0c4-4bcef38173b7","Type":"ContainerDied","Data":"cba87749d56a9bef2541cb5db565769ce28f4f1e0b05a4b8a954a62216223c33"}
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.465371 4634 scope.go:117] "RemoveContainer" containerID="f3a0d79b872cc4c22a86229d575f404459460f94214c1109ef0ffee698ef1de4"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.465528 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.493976 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc" event={"ID":"d4baf450-6210-4b45-9c46-810b190b1741","Type":"ContainerDied","Data":"603d4ddf94e89a69d92f9dc672a7cbc03008bff2c54ae282877ff7af03d34cd6"}
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.495708 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-jddgc"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.507700 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.507726 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4baf450-6210-4b45-9c46-810b190b1741-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.515346 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"40ab9885-b2c0-483d-901a-9f850dffd96f","Type":"ContainerDied","Data":"f48aee11d689818f9aa513175c51b734584866d9545e7c095fc8ff5f4d00d50e"}
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.515442 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.533414 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7bbc59f76-j4rjv" event={"ID":"1427174f-e673-4c3e-bf36-f1463327fd61","Type":"ContainerDied","Data":"22b15cb39178373aaae412638951a5164f4062fbb550a96feaa3b70bdb0e1f20"}
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.533655 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7bbc59f76-j4rjv"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.536170 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54f5455b48-gwkmt"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.557911 4634 scope.go:117] "RemoveContainer" containerID="0048fa50e8fa68718e3de30d8b2da6272a12b039e412b574a35f901fb82a98a5"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.565586 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.683419 4634 scope.go:117] "RemoveContainer" containerID="f5b0a49a7b4b03ac4db641c9c08dac48d13440ce16581479ce0d0f1f8e241aab"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738522 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvzpp\" (UniqueName: \"kubernetes.io/projected/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-kube-api-access-xvzpp\") pod \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738612 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gm2c5\" (UniqueName: \"kubernetes.io/projected/2d7c0763-bf32-4ecb-b1bd-b989e492e943-kube-api-access-gm2c5\") pod \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738681 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-ovndb-tls-certs\") pod \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738739 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-httpd-config\") pod \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738778 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data\") pod \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738884 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-config\") pod \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738934 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-logs\") pod \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.738961 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-combined-ca-bundle\") pod \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\" (UID: \"2d7c0763-bf32-4ecb-b1bd-b989e492e943\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.739018 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-combined-ca-bundle\") pod \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.739042 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data-custom\") pod \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\" (UID: \"845a9190-05ae-4893-a7e3-d4bcaa7d8d53\") "
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.748017 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-logs" (OuterVolumeSpecName: "logs") pod "845a9190-05ae-4893-a7e3-d4bcaa7d8d53" (UID: "845a9190-05ae-4893-a7e3-d4bcaa7d8d53"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.751206 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jps78"]
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.768144 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jddgc"]
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.768758 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-kube-api-access-xvzpp" (OuterVolumeSpecName: "kube-api-access-xvzpp") pod "845a9190-05ae-4893-a7e3-d4bcaa7d8d53" (UID: "845a9190-05ae-4893-a7e3-d4bcaa7d8d53"). InnerVolumeSpecName "kube-api-access-xvzpp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.839338 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "845a9190-05ae-4893-a7e3-d4bcaa7d8d53" (UID: "845a9190-05ae-4893-a7e3-d4bcaa7d8d53"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.842848 4634 scope.go:117] "RemoveContainer" containerID="0f6c9c0c9bab68227dc882d7aaa3f2bc6901e6fbba9bf5c926069958734387ff"
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.858273 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d7c0763-bf32-4ecb-b1bd-b989e492e943-kube-api-access-gm2c5" (OuterVolumeSpecName: "kube-api-access-gm2c5") pod "2d7c0763-bf32-4ecb-b1bd-b989e492e943" (UID: "2d7c0763-bf32-4ecb-b1bd-b989e492e943"). InnerVolumeSpecName "kube-api-access-gm2c5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.874427 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "2d7c0763-bf32-4ecb-b1bd-b989e492e943" (UID: "2d7c0763-bf32-4ecb-b1bd-b989e492e943"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.882352 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-jddgc"]
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.883840 4634 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data-custom\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.883870 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvzpp\" (UniqueName: \"kubernetes.io/projected/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-kube-api-access-xvzpp\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.883882 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gm2c5\" (UniqueName: \"kubernetes.io/projected/2d7c0763-bf32-4ecb-b1bd-b989e492e943-kube-api-access-gm2c5\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.883893 4634 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-httpd-config\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.883902 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-logs\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.885241 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "845a9190-05ae-4893-a7e3-d4bcaa7d8d53" (UID: "845a9190-05ae-4893-a7e3-d4bcaa7d8d53"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.960978 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7bbc59f76-j4rjv"]
Sep 29 14:05:17 crc kubenswrapper[4634]: I0929 14:05:17.989717 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.013656 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7bbc59f76-j4rjv"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.042421 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-jnxdv"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.060259 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-config" (OuterVolumeSpecName: "config") pod "2d7c0763-bf32-4ecb-b1bd-b989e492e943" (UID: "2d7c0763-bf32-4ecb-b1bd-b989e492e943"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.076073 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data" (OuterVolumeSpecName: "config-data") pod "845a9190-05ae-4893-a7e3-d4bcaa7d8d53" (UID: "845a9190-05ae-4893-a7e3-d4bcaa7d8d53"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.089853 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d7c0763-bf32-4ecb-b1bd-b989e492e943" (UID: "2d7c0763-bf32-4ecb-b1bd-b989e492e943"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.095656 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845a9190-05ae-4893-a7e3-d4bcaa7d8d53-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.095685 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-config\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.095694 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.095713 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.132747 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "2d7c0763-bf32-4ecb-b1bd-b989e492e943" (UID: "2d7c0763-bf32-4ecb-b1bd-b989e492e943"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.145738 4634 scope.go:117] "RemoveContainer" containerID="c1a185d440360438a56d988ecef931b8e483a3d534199678574eabb4afc3c501"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.148141 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" path="/var/lib/kubelet/pods/1427174f-e673-4c3e-bf36-f1463327fd61/volumes"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.148819 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4baf450-6210-4b45-9c46-810b190b1741" path="/var/lib/kubelet/pods/d4baf450-6210-4b45-9c46-810b190b1741/volumes"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.150293 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.190319 4634 scope.go:117] "RemoveContainer" containerID="904a6af8671f5422d015207c4050d71ec1cbe3ade06c9cb0228e144977fc0a90"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193401 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193830 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="probe"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193862 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="probe"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193881 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193888 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193896 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="proxy-httpd"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193901 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="proxy-httpd"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193907 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4baf450-6210-4b45-9c46-810b190b1741" containerName="init"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193913 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4baf450-6210-4b45-9c46-810b190b1741" containerName="init"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193935 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-httpd"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193940 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-httpd"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193949 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon-log"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193956 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon-log"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193969 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193975 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.193994 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4baf450-6210-4b45-9c46-810b190b1741" containerName="dnsmasq-dns"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.193999 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4baf450-6210-4b45-9c46-810b190b1741" containerName="dnsmasq-dns"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.194006 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="cinder-scheduler"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194012 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="cinder-scheduler"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.194023 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-api"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194029 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-api"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.194045 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194050 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log"
Sep 29 14:05:18 crc kubenswrapper[4634]: E0929 14:05:18.194063 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="ceilometer-notification-agent"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194069 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="ceilometer-notification-agent"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194250 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon-log"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194269 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="1427174f-e673-4c3e-bf36-f1463327fd61" containerName="horizon"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194276 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="probe"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194288 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="ceilometer-notification-agent"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194297 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" containerName="cinder-scheduler"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194305 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" containerName="proxy-httpd"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194316 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-httpd"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194324 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api-log"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194332 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4baf450-6210-4b45-9c46-810b190b1741" containerName="dnsmasq-dns"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194339 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" containerName="neutron-api"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.194349 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" containerName="barbican-api"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.195454 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.199344 4634 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d7c0763-bf32-4ecb-b1bd-b989e492e943-ovndb-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.200002 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.211211 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.217685 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.225355 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.239987 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.242473 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.245241 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.245737 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.256271 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-lbqsf"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.258880 4634 scope.go:117] "RemoveContainer" containerID="fd599d2a47dc10f2251e1e9bfc707c55aaa8162d18f2e03f6679c324016f8548"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.287062 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.300969 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03850223-163a-4eca-a290-1d072a2b535d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301020 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-log-httpd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301044 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-run-httpd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301066 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkdfd\" (UniqueName: \"kubernetes.io/projected/e14db37b-1e3b-4faf-b73d-339e69007677-kube-api-access-zkdfd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301096 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301116 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301167 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-config-data\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301191 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-scripts\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301206 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301226 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-scripts\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301267 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2n85\" (UniqueName: \"kubernetes.io/projected/03850223-163a-4eca-a290-1d072a2b535d-kube-api-access-n2n85\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.301284 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-config-data\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.302039 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.330024 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-847d5655ff-zzkf2"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.405463 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2n85\" (UniqueName: \"kubernetes.io/projected/03850223-163a-4eca-a290-1d072a2b535d-kube-api-access-n2n85\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.405517 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-config-data\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.405568 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406213 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03850223-163a-4eca-a290-1d072a2b535d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406244 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-log-httpd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406262 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-run-httpd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406279 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkdfd\" (UniqueName: \"kubernetes.io/projected/e14db37b-1e3b-4faf-b73d-339e69007677-kube-api-access-zkdfd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406293 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406316 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406367 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-config-data\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406396 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-scripts\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406417 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406446 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-scripts\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.406679 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-log-httpd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.411860 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-run-httpd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.412185 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/03850223-163a-4eca-a290-1d072a2b535d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.412995 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-config-data\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.414279 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.419901 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-scripts\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.421249 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-scripts\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.422308 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-config-data\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.436393 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.436926 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkdfd\" (UniqueName: \"kubernetes.io/projected/e14db37b-1e3b-4faf-b73d-339e69007677-kube-api-access-zkdfd\") pod \"ceilometer-0\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") " pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.440647 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2n85\" (UniqueName: \"kubernetes.io/projected/03850223-163a-4eca-a290-1d072a2b535d-kube-api-access-n2n85\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.441155 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.441341 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03850223-163a-4eca-a290-1d072a2b535d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"03850223-163a-4eca-a290-1d072a2b535d\") " pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.494556 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.494883 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerName="glance-log" containerID="cri-o://43b95836ae5ded67751cfa89e26f9bc1af003788323af2b3fce3cebe37de45d1" gracePeriod=30
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.495281 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerName="glance-httpd" containerID="cri-o://d693106158ca88657de75dd1486e13e25c292e0a766989d4b2a7bd85fbd84a1a" gracePeriod=30
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.549990 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.562046 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.564413 4634 scope.go:117] "RemoveContainer" containerID="7dbc773a851935823ae057d5f45287b8cb997b84d3d50b9ecf58704e2fd62dd0"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.570260 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-847d5655ff-zzkf2" event={"ID":"eb9f537d-9c82-4675-aeaf-c0e4656a1330","Type":"ContainerStarted","Data":"a59a7d05923aa056d26e2f3d2797ad00f68ab40d49ff789e3f9f46c916818568"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.575064 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jps78" event={"ID":"e2280fa9-785d-467b-a50c-010d55023f64","Type":"ContainerStarted","Data":"8b93dc62bae995a6ba3c042ce8aeef8879e8ee6a80ac5b251633cadfc95d412e"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.575379 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jps78" event={"ID":"e2280fa9-785d-467b-a50c-010d55023f64","Type":"ContainerStarted","Data":"8695fc83af0880544f1ea9e8bc19bd7734990698314060a3a3463e3e669b5744"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.579399 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lbqsf" event={"ID":"e673e0f3-02fe-40fb-8fc5-8ab681f26068","Type":"ContainerStarted","Data":"9829540ad9feff8aacb633083bd2ae8e56f580e4e92ce0f17976cbe16c053914"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.579440 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lbqsf" event={"ID":"e673e0f3-02fe-40fb-8fc5-8ab681f26068","Type":"ContainerStarted","Data":"f2be33279a49ed13d4589747ceebafd40c0d6ad4391d77ba3e5c943007601d96"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.588659 4634 generic.go:334] "Generic (PLEG): container finished" podID="eab330ba-5ee5-43b8-b7a5-8c229c3447c3" containerID="c229fa6418aa7fb8d33facb2cf9b9e06e03ed5f62b0b6153d961c84ad1017816" exitCode=0
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.588753 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jnxdv" event={"ID":"eab330ba-5ee5-43b8-b7a5-8c229c3447c3","Type":"ContainerDied","Data":"c229fa6418aa7fb8d33facb2cf9b9e06e03ed5f62b0b6153d961c84ad1017816"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.588782 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jnxdv" event={"ID":"eab330ba-5ee5-43b8-b7a5-8c229c3447c3","Type":"ContainerStarted","Data":"fad08bc20282651c367f56d87c75d0f577af44a8dbb839a26a1582c1c8c079d4"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.607650 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-54f5455b48-gwkmt" event={"ID":"2d7c0763-bf32-4ecb-b1bd-b989e492e943","Type":"ContainerDied","Data":"ddffcd9c479eaf360ebbce8ddf24df626192e6ff0e065d44b3f7eeea0939b7ad"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.607759 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-54f5455b48-gwkmt"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.612234 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-jps78" podStartSLOduration=9.612189772 podStartE2EDuration="9.612189772s" podCreationTimestamp="2025-09-29 14:05:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:18.599501481 +0000 UTC m=+1249.168229230" watchObservedRunningTime="2025-09-29 14:05:18.612189772 +0000 UTC m=+1249.180917521"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.643392 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6b7c5bc546-zfttb" event={"ID":"845a9190-05ae-4893-a7e3-d4bcaa7d8d53","Type":"ContainerDied","Data":"53c15d0993634bdaa999eef9b9dbfcaa9790084c3c51944263481692cfb618a6"}
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.643723 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6b7c5bc546-zfttb"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.699880 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-54f5455b48-gwkmt"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.725893 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-54f5455b48-gwkmt"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.749393 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6b7c5bc546-zfttb"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.757157 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6b7c5bc546-zfttb"]
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.757386 4634 scope.go:117] "RemoveContainer" containerID="c99e5c0faef3b7c8c5ecbf0919ee165ee2274be3982d3263030eb303399e6d9d"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.851723 4634 scope.go:117] "RemoveContainer" containerID="234b74c8bccfa87df5a4ee49d1a3189478c634ddd98b00ae4dd119a84e8b2908"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.941960 4634 scope.go:117] "RemoveContainer" containerID="bcdb05a5274428257047e4b7630d9d63c64ed44adcdbd3938884f86db208a259"
Sep 29 14:05:18 crc kubenswrapper[4634]: I0929 14:05:18.986181 4634 scope.go:117] "RemoveContainer" containerID="04ad20588640ae84f0a4b506764f8f9f7858baefcada5c481aebc89d21c4b0fc"
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.314054 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.482831 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.725709 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-847d5655ff-zzkf2" event={"ID":"eb9f537d-9c82-4675-aeaf-c0e4656a1330","Type":"ContainerStarted","Data":"3dca1ab68db4239fbc790b9f44908f6ebabe413aacbb795d6ea84252a3771eb0"}
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.725775 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-847d5655ff-zzkf2" event={"ID":"eb9f537d-9c82-4675-aeaf-c0e4656a1330","Type":"ContainerStarted","Data":"36461e4a3e7d1c17ff75c2aebdfb73cb070092e3c8b8f1831881a5ee6b303ec3"}
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.727563 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-847d5655ff-zzkf2"
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.727594 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-847d5655ff-zzkf2"
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.734973 4634 generic.go:334] "Generic (PLEG): container finished" podID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerID="43b95836ae5ded67751cfa89e26f9bc1af003788323af2b3fce3cebe37de45d1" exitCode=143
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.735259 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b","Type":"ContainerDied","Data":"43b95836ae5ded67751cfa89e26f9bc1af003788323af2b3fce3cebe37de45d1"}
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.738700 4634 generic.go:334] "Generic (PLEG): container finished" podID="e2280fa9-785d-467b-a50c-010d55023f64" containerID="8b93dc62bae995a6ba3c042ce8aeef8879e8ee6a80ac5b251633cadfc95d412e" exitCode=0
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.739041 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jps78" event={"ID":"e2280fa9-785d-467b-a50c-010d55023f64","Type":"ContainerDied","Data":"8b93dc62bae995a6ba3c042ce8aeef8879e8ee6a80ac5b251633cadfc95d412e"}
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.752123 4634 generic.go:334] "Generic (PLEG): container finished" podID="e673e0f3-02fe-40fb-8fc5-8ab681f26068" containerID="9829540ad9feff8aacb633083bd2ae8e56f580e4e92ce0f17976cbe16c053914" exitCode=0
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.752280 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lbqsf" event={"ID":"e673e0f3-02fe-40fb-8fc5-8ab681f26068","Type":"ContainerDied","Data":"9829540ad9feff8aacb633083bd2ae8e56f580e4e92ce0f17976cbe16c053914"}
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.754473 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-847d5655ff-zzkf2" podStartSLOduration=13.754449954 podStartE2EDuration="13.754449954s" podCreationTimestamp="2025-09-29 14:05:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:19.746772463 +0000 UTC m=+1250.315500202" watchObservedRunningTime="2025-09-29 14:05:19.754449954 +0000 UTC m=+1250.323177703"
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.767026 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"03850223-163a-4eca-a290-1d072a2b535d","Type":"ContainerStarted","Data":"74f0441c6d9df2e613c471f85ea8efc0a9aa675b6d8d49423aa344ff75992dc6"}
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.772647 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerStarted","Data":"8e924bbdd83c3612042ad4606a6a6ca43cb4a4e697d1e885ade28a50b159aa75"}
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.911247 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.911632 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-log" containerID="cri-o://ff5795357b5aa7fe51d9efa1132531504898e060de408f369c5fad52016d304d" gracePeriod=30
Sep 29 14:05:19 crc kubenswrapper[4634]: I0929 14:05:19.912224 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-httpd" containerID="cri-o://2927dd651f052970082f59949258c928b3afb3883520ecfdd71d09f10a29dee0" gracePeriod=30
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.196736 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="061c94cb-cc6c-4a14-a0c4-4bcef38173b7" path="/var/lib/kubelet/pods/061c94cb-cc6c-4a14-a0c4-4bcef38173b7/volumes"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.197754 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d7c0763-bf32-4ecb-b1bd-b989e492e943" path="/var/lib/kubelet/pods/2d7c0763-bf32-4ecb-b1bd-b989e492e943/volumes"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.198305 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40ab9885-b2c0-483d-901a-9f850dffd96f" path="/var/lib/kubelet/pods/40ab9885-b2c0-483d-901a-9f850dffd96f/volumes"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.203948 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="845a9190-05ae-4893-a7e3-d4bcaa7d8d53" path="/var/lib/kubelet/pods/845a9190-05ae-4893-a7e3-d4bcaa7d8d53/volumes"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.447702 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lbqsf"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.538251 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcqwf\" (UniqueName: \"kubernetes.io/projected/e673e0f3-02fe-40fb-8fc5-8ab681f26068-kube-api-access-gcqwf\") pod \"e673e0f3-02fe-40fb-8fc5-8ab681f26068\" (UID: \"e673e0f3-02fe-40fb-8fc5-8ab681f26068\") "
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.559647 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e673e0f3-02fe-40fb-8fc5-8ab681f26068-kube-api-access-gcqwf" (OuterVolumeSpecName: "kube-api-access-gcqwf") pod "e673e0f3-02fe-40fb-8fc5-8ab681f26068" (UID: "e673e0f3-02fe-40fb-8fc5-8ab681f26068"). InnerVolumeSpecName "kube-api-access-gcqwf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.653587 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcqwf\" (UniqueName: \"kubernetes.io/projected/e673e0f3-02fe-40fb-8fc5-8ab681f26068-kube-api-access-gcqwf\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.717853 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jnxdv"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.824493 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-lbqsf" event={"ID":"e673e0f3-02fe-40fb-8fc5-8ab681f26068","Type":"ContainerDied","Data":"f2be33279a49ed13d4589747ceebafd40c0d6ad4391d77ba3e5c943007601d96"}
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.824548 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2be33279a49ed13d4589747ceebafd40c0d6ad4391d77ba3e5c943007601d96"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.824719 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-lbqsf"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.859845 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5qpf\" (UniqueName: \"kubernetes.io/projected/eab330ba-5ee5-43b8-b7a5-8c229c3447c3-kube-api-access-r5qpf\") pod \"eab330ba-5ee5-43b8-b7a5-8c229c3447c3\" (UID: \"eab330ba-5ee5-43b8-b7a5-8c229c3447c3\") "
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.878134 4634 generic.go:334] "Generic (PLEG): container finished" podID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerID="ff5795357b5aa7fe51d9efa1132531504898e060de408f369c5fad52016d304d" exitCode=143
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.878290 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ef284948-85a5-432e-b6a6-a57f988f02fc","Type":"ContainerDied","Data":"ff5795357b5aa7fe51d9efa1132531504898e060de408f369c5fad52016d304d"}
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.878438 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eab330ba-5ee5-43b8-b7a5-8c229c3447c3-kube-api-access-r5qpf" (OuterVolumeSpecName: "kube-api-access-r5qpf") pod "eab330ba-5ee5-43b8-b7a5-8c229c3447c3" (UID: "eab330ba-5ee5-43b8-b7a5-8c229c3447c3"). InnerVolumeSpecName "kube-api-access-r5qpf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.906334 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-jnxdv" event={"ID":"eab330ba-5ee5-43b8-b7a5-8c229c3447c3","Type":"ContainerDied","Data":"fad08bc20282651c367f56d87c75d0f577af44a8dbb839a26a1582c1c8c079d4"}
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.906389 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fad08bc20282651c367f56d87c75d0f577af44a8dbb839a26a1582c1c8c079d4"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.906504 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-jnxdv"
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.931210 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerStarted","Data":"d6283604b2996f1109d294d0a48cdbb9ebca37aaf3020bb1f773194b60806636"}
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.953972 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"03850223-163a-4eca-a290-1d072a2b535d","Type":"ContainerStarted","Data":"ba5bcb4993d1d5203832162692a2689fc605da76aaafc5c4eda4fb86455d8a4b"}
Sep 29 14:05:20 crc kubenswrapper[4634]: I0929 14:05:20.965242 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5qpf\" (UniqueName: \"kubernetes.io/projected/eab330ba-5ee5-43b8-b7a5-8c229c3447c3-kube-api-access-r5qpf\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:21 crc kubenswrapper[4634]: I0929 14:05:21.668041 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jps78"
Sep 29 14:05:21 crc kubenswrapper[4634]: I0929 14:05:21.795648 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsgtp\" (UniqueName: \"kubernetes.io/projected/e2280fa9-785d-467b-a50c-010d55023f64-kube-api-access-hsgtp\") pod \"e2280fa9-785d-467b-a50c-010d55023f64\" (UID: \"e2280fa9-785d-467b-a50c-010d55023f64\") "
Sep 29 14:05:21 crc kubenswrapper[4634]: I0929 14:05:21.803709 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2280fa9-785d-467b-a50c-010d55023f64-kube-api-access-hsgtp" (OuterVolumeSpecName: "kube-api-access-hsgtp") pod "e2280fa9-785d-467b-a50c-010d55023f64" (UID: "e2280fa9-785d-467b-a50c-010d55023f64"). InnerVolumeSpecName "kube-api-access-hsgtp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:21 crc kubenswrapper[4634]: I0929 14:05:21.902060 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsgtp\" (UniqueName: \"kubernetes.io/projected/e2280fa9-785d-467b-a50c-010d55023f64-kube-api-access-hsgtp\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:21 crc kubenswrapper[4634]: I0929 14:05:21.985846 4634 generic.go:334] "Generic (PLEG): container finished" podID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerID="d693106158ca88657de75dd1486e13e25c292e0a766989d4b2a7bd85fbd84a1a" exitCode=0
Sep 29 14:05:21 crc kubenswrapper[4634]: I0929 14:05:21.985924 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b","Type":"ContainerDied","Data":"d693106158ca88657de75dd1486e13e25c292e0a766989d4b2a7bd85fbd84a1a"}
Sep 29 14:05:21 crc kubenswrapper[4634]: I0929 14:05:21.998256 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"03850223-163a-4eca-a290-1d072a2b535d","Type":"ContainerStarted","Data":"8a0768ba3255225224f0fb1a8a522394b843e45e192c4188fdcbc621b864afc5"}
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.032140 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jps78" event={"ID":"e2280fa9-785d-467b-a50c-010d55023f64","Type":"ContainerDied","Data":"8695fc83af0880544f1ea9e8bc19bd7734990698314060a3a3463e3e669b5744"}
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.032190 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8695fc83af0880544f1ea9e8bc19bd7734990698314060a3a3463e3e669b5744"
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.032303 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jps78"
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.587681 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.629160 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.629136884 podStartE2EDuration="5.629136884s" podCreationTimestamp="2025-09-29 14:05:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:22.041754712 +0000 UTC m=+1252.610482461" watchObservedRunningTime="2025-09-29 14:05:22.629136884 +0000 UTC m=+1253.197864633"
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737214 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737343 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-combined-ca-bundle\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737413 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-public-tls-certs\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737437 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-scripts\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737465 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-config-data\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737497 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2v4mk\" (UniqueName: \"kubernetes.io/projected/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-kube-api-access-2v4mk\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737536 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-httpd-run\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.737578 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-logs\") pod \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\" (UID: \"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b\") "
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.738556 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-logs" (OuterVolumeSpecName: "logs") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.745333 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.764459 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.775201 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-scripts" (OuterVolumeSpecName: "scripts") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.776164 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-kube-api-access-2v4mk" (OuterVolumeSpecName: "kube-api-access-2v4mk") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "kube-api-access-2v4mk".
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.839893 4634 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.839938 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.839971 4634 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.839984 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.839996 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2v4mk\" (UniqueName: \"kubernetes.io/projected/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-kube-api-access-2v4mk\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.842433 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.900405 4634 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.938860 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-config-data" (OuterVolumeSpecName: "config-data") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.942372 4634 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.942410 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.942421 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:22 crc kubenswrapper[4634]: I0929 14:05:22.991226 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" (UID: "9b2378f2-6227-4121-b9cc-a8ecbda2ff6b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.044749 4634 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.059574 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerStarted","Data":"c06c4dba215290c1e79a8f27caafa928050949bd949e331ec11fc88b512ade8d"} Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.062242 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9b2378f2-6227-4121-b9cc-a8ecbda2ff6b","Type":"ContainerDied","Data":"827ec655154c4ef7f8c378435c16daf563b1488578cf4089da803fc5fdbb3d4f"} Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.062276 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.062328 4634 scope.go:117] "RemoveContainer" containerID="d693106158ca88657de75dd1486e13e25c292e0a766989d4b2a7bd85fbd84a1a" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.104328 4634 scope.go:117] "RemoveContainer" containerID="43b95836ae5ded67751cfa89e26f9bc1af003788323af2b3fce3cebe37de45d1" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.120998 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.136123 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.160739 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:05:23 crc kubenswrapper[4634]: E0929 14:05:23.161191 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e673e0f3-02fe-40fb-8fc5-8ab681f26068" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161212 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e673e0f3-02fe-40fb-8fc5-8ab681f26068" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: E0929 14:05:23.161232 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerName="glance-log" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161241 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerName="glance-log" Sep 29 14:05:23 crc kubenswrapper[4634]: E0929 14:05:23.161265 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2280fa9-785d-467b-a50c-010d55023f64" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161271 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2280fa9-785d-467b-a50c-010d55023f64" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: E0929 14:05:23.161282 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eab330ba-5ee5-43b8-b7a5-8c229c3447c3" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161288 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="eab330ba-5ee5-43b8-b7a5-8c229c3447c3" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: E0929 14:05:23.161301 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerName="glance-httpd" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161308 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerName="glance-httpd" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161472 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="eab330ba-5ee5-43b8-b7a5-8c229c3447c3" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161491 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e673e0f3-02fe-40fb-8fc5-8ab681f26068" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161503 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" 
containerName="glance-httpd" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161512 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" containerName="glance-log" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.161524 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2280fa9-785d-467b-a50c-010d55023f64" containerName="mariadb-database-create" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.162568 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.174477 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.174894 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.181909 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260065 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260161 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-config-data\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260187 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/961f670c-7ab6-42b3-8fa1-b5494af46245-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260263 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/961f670c-7ab6-42b3-8fa1-b5494af46245-logs\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260285 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-scripts\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260300 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" 
Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260344 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.260439 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qg6t\" (UniqueName: \"kubernetes.io/projected/961f670c-7ab6-42b3-8fa1-b5494af46245-kube-api-access-7qg6t\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.363357 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-config-data\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.363971 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/961f670c-7ab6-42b3-8fa1-b5494af46245-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.364113 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/961f670c-7ab6-42b3-8fa1-b5494af46245-logs\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.364141 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.364162 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-scripts\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.364189 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.364328 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qg6t\" (UniqueName: \"kubernetes.io/projected/961f670c-7ab6-42b3-8fa1-b5494af46245-kube-api-access-7qg6t\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.364431 4634 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.367510 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.372461 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/961f670c-7ab6-42b3-8fa1-b5494af46245-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.376543 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-scripts\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.376875 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/961f670c-7ab6-42b3-8fa1-b5494af46245-logs\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.379794 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.387173 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-config-data\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.395886 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/961f670c-7ab6-42b3-8fa1-b5494af46245-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.401467 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qg6t\" (UniqueName: \"kubernetes.io/projected/961f670c-7ab6-42b3-8fa1-b5494af46245-kube-api-access-7qg6t\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.428770 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-external-api-0\" (UID: \"961f670c-7ab6-42b3-8fa1-b5494af46245\") " pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.487300 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 14:05:23 crc kubenswrapper[4634]: I0929 14:05:23.551152 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.173275 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b2378f2-6227-4121-b9cc-a8ecbda2ff6b" path="/var/lib/kubelet/pods/9b2378f2-6227-4121-b9cc-a8ecbda2ff6b/volumes" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.205663 4634 generic.go:334] "Generic (PLEG): container finished" podID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerID="2927dd651f052970082f59949258c928b3afb3883520ecfdd71d09f10a29dee0" exitCode=0 Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.205775 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ef284948-85a5-432e-b6a6-a57f988f02fc","Type":"ContainerDied","Data":"2927dd651f052970082f59949258c928b3afb3883520ecfdd71d09f10a29dee0"} Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.250293 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerStarted","Data":"3949adbb0d5efafc8981f02ca24a6d5e92096a34eede2758aafa1568ec3b34f6"} Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.388758 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.758614 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.821594 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-config-data\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.821676 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-combined-ca-bundle\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.821745 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.821848 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-internal-tls-certs\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.821891 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-httpd-run\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.821930 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-scripts\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.821990 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-logs\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.822036 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9lnt\" (UniqueName: \"kubernetes.io/projected/ef284948-85a5-432e-b6a6-a57f988f02fc-kube-api-access-t9lnt\") pod \"ef284948-85a5-432e-b6a6-a57f988f02fc\" (UID: \"ef284948-85a5-432e-b6a6-a57f988f02fc\") " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.822592 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-logs" (OuterVolumeSpecName: "logs") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.822751 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.848449 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-scripts" (OuterVolumeSpecName: "scripts") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.848958 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef284948-85a5-432e-b6a6-a57f988f02fc-kube-api-access-t9lnt" (OuterVolumeSpecName: "kube-api-access-t9lnt") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). InnerVolumeSpecName "kube-api-access-t9lnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.854350 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.908596 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.927266 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.927779 4634 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.927793 4634 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.927803 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.927817 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef284948-85a5-432e-b6a6-a57f988f02fc-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.927827 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9lnt\" (UniqueName: \"kubernetes.io/projected/ef284948-85a5-432e-b6a6-a57f988f02fc-kube-api-access-t9lnt\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:24 crc kubenswrapper[4634]: I0929 14:05:24.959727 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.168:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.005942 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.029742 4634 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.030498 4634 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.132785 4634 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.187306 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-config-data" (OuterVolumeSpecName: "config-data") pod "ef284948-85a5-432e-b6a6-a57f988f02fc" (UID: "ef284948-85a5-432e-b6a6-a57f988f02fc"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.236351 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef284948-85a5-432e-b6a6-a57f988f02fc-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.307428 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"961f670c-7ab6-42b3-8fa1-b5494af46245","Type":"ContainerStarted","Data":"26230b93b7ae3b4f3397c0c4c70775806c4799c3dcf52bdebf760e0b4023c819"} Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.320557 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ef284948-85a5-432e-b6a6-a57f988f02fc","Type":"ContainerDied","Data":"dd4ebbc2cfd79ed3ba82d018fce73c409fd0a717cc9dd62678ab778c4c3bdb3f"} Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.321032 4634 scope.go:117] "RemoveContainer" containerID="2927dd651f052970082f59949258c928b3afb3883520ecfdd71d09f10a29dee0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.320744 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.381462 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.393743 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.421369 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:05:25 crc kubenswrapper[4634]: E0929 14:05:25.429841 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-log" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.429889 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-log" Sep 29 14:05:25 crc kubenswrapper[4634]: E0929 14:05:25.429922 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-httpd" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.429929 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-httpd" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.430162 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-log" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.430181 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" containerName="glance-httpd" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.431289 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.438048 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.439583 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.451444 4634 scope.go:117] "RemoveContainer" containerID="ff5795357b5aa7fe51d9efa1132531504898e060de408f369c5fad52016d304d" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.459990 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.543253 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.543355 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.543682 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc03219-407d-4010-9c0f-5bbf4d94da6a-logs\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.543907 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.543935 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3dc03219-407d-4010-9c0f-5bbf4d94da6a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.543967 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.544022 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v69r2\" (UniqueName: \"kubernetes.io/projected/3dc03219-407d-4010-9c0f-5bbf4d94da6a-kube-api-access-v69r2\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " 
pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.544115 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646228 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646358 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646462 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc03219-407d-4010-9c0f-5bbf4d94da6a-logs\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646533 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646562 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3dc03219-407d-4010-9c0f-5bbf4d94da6a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646587 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646623 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v69r2\" (UniqueName: \"kubernetes.io/projected/3dc03219-407d-4010-9c0f-5bbf4d94da6a-kube-api-access-v69r2\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.646662 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc 
kubenswrapper[4634]: I0929 14:05:25.649010 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.651318 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3dc03219-407d-4010-9c0f-5bbf4d94da6a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.651553 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc03219-407d-4010-9c0f-5bbf4d94da6a-logs\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.664519 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.666178 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.670348 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.688707 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v69r2\" (UniqueName: \"kubernetes.io/projected/3dc03219-407d-4010-9c0f-5bbf4d94da6a-kube-api-access-v69r2\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.688811 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc03219-407d-4010-9c0f-5bbf4d94da6a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.713882 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-internal-api-0\" (UID: \"3dc03219-407d-4010-9c0f-5bbf4d94da6a\") " pod="openstack/glance-default-internal-api-0" Sep 29 14:05:25 crc kubenswrapper[4634]: I0929 14:05:25.790237 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 14:05:26 crc kubenswrapper[4634]: I0929 14:05:26.190196 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef284948-85a5-432e-b6a6-a57f988f02fc" path="/var/lib/kubelet/pods/ef284948-85a5-432e-b6a6-a57f988f02fc/volumes" Sep 29 14:05:26 crc kubenswrapper[4634]: I0929 14:05:26.350320 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerStarted","Data":"8de44334482e6d854028c144b93ba76c5ffa5bc28d7a35758cfb073ec168c112"} Sep 29 14:05:26 crc kubenswrapper[4634]: I0929 14:05:26.350800 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:05:26 crc kubenswrapper[4634]: I0929 14:05:26.355228 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"961f670c-7ab6-42b3-8fa1-b5494af46245","Type":"ContainerStarted","Data":"652f156821300cac91bbf2886a5cd1ea29abb67240a3be926920d01dd5d13d65"} Sep 29 14:05:26 crc kubenswrapper[4634]: I0929 14:05:26.400231 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.047352326 podStartE2EDuration="9.400209927s" podCreationTimestamp="2025-09-29 14:05:17 +0000 UTC" firstStartedPulling="2025-09-29 14:05:19.513136318 +0000 UTC m=+1250.081864057" lastFinishedPulling="2025-09-29 14:05:24.865993909 +0000 UTC m=+1255.434721658" observedRunningTime="2025-09-29 14:05:26.398777865 +0000 UTC m=+1256.967505624" watchObservedRunningTime="2025-09-29 14:05:26.400209927 +0000 UTC m=+1256.968937666" Sep 29 14:05:26 crc kubenswrapper[4634]: I0929 14:05:26.490641 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 14:05:27 crc kubenswrapper[4634]: I0929 14:05:27.355329 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:05:27 crc kubenswrapper[4634]: I0929 14:05:27.369820 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"961f670c-7ab6-42b3-8fa1-b5494af46245","Type":"ContainerStarted","Data":"8c213decdc94267d8a22ee609dc9b85918756f2c220752c8e2a662f2e2127c40"} Sep 29 14:05:27 crc kubenswrapper[4634]: I0929 14:05:27.371419 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3dc03219-407d-4010-9c0f-5bbf4d94da6a","Type":"ContainerStarted","Data":"3d3e4bf8a75060892e0b8bf5af240c8145843bc1cb54aa41db797ed6f18a917d"} Sep 29 14:05:27 crc kubenswrapper[4634]: I0929 14:05:27.390681 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:27 crc kubenswrapper[4634]: I0929 14:05:27.406431 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-847d5655ff-zzkf2" Sep 29 14:05:27 crc kubenswrapper[4634]: I0929 14:05:27.436564 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.436533713 podStartE2EDuration="4.436533713s" podCreationTimestamp="2025-09-29 14:05:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:27.405130675 +0000 UTC m=+1257.973858444" watchObservedRunningTime="2025-09-29 
14:05:27.436533713 +0000 UTC m=+1258.005261462"
Sep 29 14:05:28 crc kubenswrapper[4634]: I0929 14:05:28.410414 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3dc03219-407d-4010-9c0f-5bbf4d94da6a","Type":"ContainerStarted","Data":"71182f710db472e4b14c72a751fcc97b538bb8f22daadcf475f998724885731a"}
Sep 29 14:05:28 crc kubenswrapper[4634]: I0929 14:05:28.410754 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-central-agent" containerID="cri-o://d6283604b2996f1109d294d0a48cdbb9ebca37aaf3020bb1f773194b60806636" gracePeriod=30
Sep 29 14:05:28 crc kubenswrapper[4634]: I0929 14:05:28.411263 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="proxy-httpd" containerID="cri-o://8de44334482e6d854028c144b93ba76c5ffa5bc28d7a35758cfb073ec168c112" gracePeriod=30
Sep 29 14:05:28 crc kubenswrapper[4634]: I0929 14:05:28.411358 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="sg-core" containerID="cri-o://3949adbb0d5efafc8981f02ca24a6d5e92096a34eede2758aafa1568ec3b34f6" gracePeriod=30
Sep 29 14:05:28 crc kubenswrapper[4634]: I0929 14:05:28.411361 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-notification-agent" containerID="cri-o://c06c4dba215290c1e79a8f27caafa928050949bd949e331ec11fc88b512ade8d" gracePeriod=30
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.080159 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.422402 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3dc03219-407d-4010-9c0f-5bbf4d94da6a","Type":"ContainerStarted","Data":"7a13f4697ad16dcfe361bce7c0eba6388aa61b43b7faba7db66834197276f04b"}
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432401 4634 generic.go:334] "Generic (PLEG): container finished" podID="e14db37b-1e3b-4faf-b73d-339e69007677" containerID="8de44334482e6d854028c144b93ba76c5ffa5bc28d7a35758cfb073ec168c112" exitCode=0
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432442 4634 generic.go:334] "Generic (PLEG): container finished" podID="e14db37b-1e3b-4faf-b73d-339e69007677" containerID="3949adbb0d5efafc8981f02ca24a6d5e92096a34eede2758aafa1568ec3b34f6" exitCode=2
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432450 4634 generic.go:334] "Generic (PLEG): container finished" podID="e14db37b-1e3b-4faf-b73d-339e69007677" containerID="c06c4dba215290c1e79a8f27caafa928050949bd949e331ec11fc88b512ade8d" exitCode=0
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432458 4634 generic.go:334] "Generic (PLEG): container finished" podID="e14db37b-1e3b-4faf-b73d-339e69007677" containerID="d6283604b2996f1109d294d0a48cdbb9ebca37aaf3020bb1f773194b60806636" exitCode=0
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432489 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerDied","Data":"8de44334482e6d854028c144b93ba76c5ffa5bc28d7a35758cfb073ec168c112"}
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432526 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerDied","Data":"3949adbb0d5efafc8981f02ca24a6d5e92096a34eede2758aafa1568ec3b34f6"}
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432536 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerDied","Data":"c06c4dba215290c1e79a8f27caafa928050949bd949e331ec11fc88b512ade8d"}
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.432547 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerDied","Data":"d6283604b2996f1109d294d0a48cdbb9ebca37aaf3020bb1f773194b60806636"}
Sep 29 14:05:29 crc kubenswrapper[4634]: I0929 14:05:29.458297 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.458270274 podStartE2EDuration="4.458270274s" podCreationTimestamp="2025-09-29 14:05:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:29.457584849 +0000 UTC m=+1260.026312608" watchObservedRunningTime="2025-09-29 14:05:29.458270274 +0000 UTC m=+1260.026998013"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.002712 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-f016-account-create-srmqs"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.006307 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f016-account-create-srmqs"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.009795 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.232475 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcrr9\" (UniqueName: \"kubernetes.io/projected/5d5b014b-6225-45cf-afcd-4263a6347f2d-kube-api-access-jcrr9\") pod \"nova-api-f016-account-create-srmqs\" (UID: \"5d5b014b-6225-45cf-afcd-4263a6347f2d\") " pod="openstack/nova-api-f016-account-create-srmqs"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.240586 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-f016-account-create-srmqs"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.246506 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.263595 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-d98c-account-create-2f7ww"]
Sep 29 14:05:30 crc kubenswrapper[4634]: E0929 14:05:30.265002 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-notification-agent"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.265035 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-notification-agent"
Sep 29 14:05:30 crc kubenswrapper[4634]: E0929 14:05:30.265108 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="proxy-httpd"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.265120 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="proxy-httpd"
Sep 29 14:05:30 crc kubenswrapper[4634]: E0929 14:05:30.265136 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-central-agent"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.265152 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-central-agent"
Sep 29 14:05:30 crc kubenswrapper[4634]: E0929 14:05:30.265180 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="sg-core"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.268685 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="sg-core"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.269238 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-central-agent"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.269273 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="sg-core"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.269298 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="ceilometer-notification-agent"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.269322 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" containerName="proxy-httpd"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.270358 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d98c-account-create-2f7ww"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.273343 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.296866 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d98c-account-create-2f7ww"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.335329 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcrr9\" (UniqueName: \"kubernetes.io/projected/5d5b014b-6225-45cf-afcd-4263a6347f2d-kube-api-access-jcrr9\") pod \"nova-api-f016-account-create-srmqs\" (UID: \"5d5b014b-6225-45cf-afcd-4263a6347f2d\") " pod="openstack/nova-api-f016-account-create-srmqs"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.383513 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcrr9\" (UniqueName: \"kubernetes.io/projected/5d5b014b-6225-45cf-afcd-4263a6347f2d-kube-api-access-jcrr9\") pod \"nova-api-f016-account-create-srmqs\" (UID: \"5d5b014b-6225-45cf-afcd-4263a6347f2d\") " pod="openstack/nova-api-f016-account-create-srmqs"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.405577 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-a71a-account-create-2d57l"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.407174 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-a71a-account-create-2d57l"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.411572 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.431857 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-a71a-account-create-2d57l"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.436317 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkdfd\" (UniqueName: \"kubernetes.io/projected/e14db37b-1e3b-4faf-b73d-339e69007677-kube-api-access-zkdfd\") pod \"e14db37b-1e3b-4faf-b73d-339e69007677\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") "
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.436781 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-run-httpd\") pod \"e14db37b-1e3b-4faf-b73d-339e69007677\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") "
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.436839 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-log-httpd\") pod \"e14db37b-1e3b-4faf-b73d-339e69007677\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") "
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.437035 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-sg-core-conf-yaml\") pod \"e14db37b-1e3b-4faf-b73d-339e69007677\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") "
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.437162 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-combined-ca-bundle\") pod \"e14db37b-1e3b-4faf-b73d-339e69007677\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") "
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.437195 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-config-data\") pod \"e14db37b-1e3b-4faf-b73d-339e69007677\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") "
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.437200 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e14db37b-1e3b-4faf-b73d-339e69007677" (UID: "e14db37b-1e3b-4faf-b73d-339e69007677"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.437297 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-scripts\") pod \"e14db37b-1e3b-4faf-b73d-339e69007677\" (UID: \"e14db37b-1e3b-4faf-b73d-339e69007677\") "
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.437883 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e14db37b-1e3b-4faf-b73d-339e69007677" (UID: "e14db37b-1e3b-4faf-b73d-339e69007677"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.438771 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnnvb\" (UniqueName: \"kubernetes.io/projected/2e65b300-4b51-45f3-8a31-9a95755cb81a-kube-api-access-jnnvb\") pod \"nova-cell0-d98c-account-create-2f7ww\" (UID: \"2e65b300-4b51-45f3-8a31-9a95755cb81a\") " pod="openstack/nova-cell0-d98c-account-create-2f7ww"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.438915 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.438936 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e14db37b-1e3b-4faf-b73d-339e69007677-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.462682 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-scripts" (OuterVolumeSpecName: "scripts") pod "e14db37b-1e3b-4faf-b73d-339e69007677" (UID: "e14db37b-1e3b-4faf-b73d-339e69007677"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.479394 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e14db37b-1e3b-4faf-b73d-339e69007677-kube-api-access-zkdfd" (OuterVolumeSpecName: "kube-api-access-zkdfd") pod "e14db37b-1e3b-4faf-b73d-339e69007677" (UID: "e14db37b-1e3b-4faf-b73d-339e69007677"). InnerVolumeSpecName "kube-api-access-zkdfd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.488987 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.489970 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e14db37b-1e3b-4faf-b73d-339e69007677","Type":"ContainerDied","Data":"8e924bbdd83c3612042ad4606a6a6ca43cb4a4e697d1e885ade28a50b159aa75"}
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.490124 4634 scope.go:117] "RemoveContainer" containerID="8de44334482e6d854028c144b93ba76c5ffa5bc28d7a35758cfb073ec168c112"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.506417 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f016-account-create-srmqs"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.533316 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e14db37b-1e3b-4faf-b73d-339e69007677" (UID: "e14db37b-1e3b-4faf-b73d-339e69007677"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.542626 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gfnh\" (UniqueName: \"kubernetes.io/projected/575fd438-a63c-4fe1-8bc7-e8d293176ec0-kube-api-access-7gfnh\") pod \"nova-cell1-a71a-account-create-2d57l\" (UID: \"575fd438-a63c-4fe1-8bc7-e8d293176ec0\") " pod="openstack/nova-cell1-a71a-account-create-2d57l"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.542944 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnnvb\" (UniqueName: \"kubernetes.io/projected/2e65b300-4b51-45f3-8a31-9a95755cb81a-kube-api-access-jnnvb\") pod \"nova-cell0-d98c-account-create-2f7ww\" (UID: \"2e65b300-4b51-45f3-8a31-9a95755cb81a\") " pod="openstack/nova-cell0-d98c-account-create-2f7ww"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.543274 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.543335 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.543357 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkdfd\" (UniqueName: \"kubernetes.io/projected/e14db37b-1e3b-4faf-b73d-339e69007677-kube-api-access-zkdfd\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.575696 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnnvb\" (UniqueName: \"kubernetes.io/projected/2e65b300-4b51-45f3-8a31-9a95755cb81a-kube-api-access-jnnvb\") pod \"nova-cell0-d98c-account-create-2f7ww\" (UID: \"2e65b300-4b51-45f3-8a31-9a95755cb81a\") " pod="openstack/nova-cell0-d98c-account-create-2f7ww"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.589035 4634 scope.go:117] "RemoveContainer" containerID="3949adbb0d5efafc8981f02ca24a6d5e92096a34eede2758aafa1568ec3b34f6"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.608647 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d98c-account-create-2f7ww"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.646590 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gfnh\" (UniqueName: \"kubernetes.io/projected/575fd438-a63c-4fe1-8bc7-e8d293176ec0-kube-api-access-7gfnh\") pod \"nova-cell1-a71a-account-create-2d57l\" (UID: \"575fd438-a63c-4fe1-8bc7-e8d293176ec0\") " pod="openstack/nova-cell1-a71a-account-create-2d57l"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.702485 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gfnh\" (UniqueName: \"kubernetes.io/projected/575fd438-a63c-4fe1-8bc7-e8d293176ec0-kube-api-access-7gfnh\") pod \"nova-cell1-a71a-account-create-2d57l\" (UID: \"575fd438-a63c-4fe1-8bc7-e8d293176ec0\") " pod="openstack/nova-cell1-a71a-account-create-2d57l"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.737769 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-a71a-account-create-2d57l"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.745618 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e14db37b-1e3b-4faf-b73d-339e69007677" (UID: "e14db37b-1e3b-4faf-b73d-339e69007677"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.749897 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.754922 4634 scope.go:117] "RemoveContainer" containerID="c06c4dba215290c1e79a8f27caafa928050949bd949e331ec11fc88b512ade8d"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.763896 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-config-data" (OuterVolumeSpecName: "config-data") pod "e14db37b-1e3b-4faf-b73d-339e69007677" (UID: "e14db37b-1e3b-4faf-b73d-339e69007677"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.855210 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e14db37b-1e3b-4faf-b73d-339e69007677-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.882638 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.885979 4634 scope.go:117] "RemoveContainer" containerID="d6283604b2996f1109d294d0a48cdbb9ebca37aaf3020bb1f773194b60806636"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.910238 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.919331 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.928597 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.938660 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.957762 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 14:05:30 crc kubenswrapper[4634]: I0929 14:05:30.958180 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.062140 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-scripts\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.062189 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.062227 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-config-data\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.062258 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-log-httpd\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.062321 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z46v\" (UniqueName: \"kubernetes.io/projected/7156e190-1481-46e7-82fc-98c910372ba5-kube-api-access-7z46v\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.062345 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-run-httpd\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.062716 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.168441 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.168514 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-scripts\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.168536 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.168568 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-config-data\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.168598 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-log-httpd\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.168643 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z46v\" (UniqueName: \"kubernetes.io/projected/7156e190-1481-46e7-82fc-98c910372ba5-kube-api-access-7z46v\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.168667 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-run-httpd\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.170350 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-log-httpd\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.195867 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-run-httpd\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.209692 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-scripts\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.210134 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-config-data\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.211032 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.218494 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z46v\" (UniqueName: \"kubernetes.io/projected/7156e190-1481-46e7-82fc-98c910372ba5-kube-api-access-7z46v\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.219300 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: W0929 14:05:31.235053 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d5b014b_6225_45cf_afcd_4263a6347f2d.slice/crio-b9539864be770775291ff8269b1346b19b84c9725e72f3f24bf88870112c328d WatchSource:0}: Error finding container b9539864be770775291ff8269b1346b19b84c9725e72f3f24bf88870112c328d: Status 404 returned error can't find the container with id b9539864be770775291ff8269b1346b19b84c9725e72f3f24bf88870112c328d
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.250495 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-f016-account-create-srmqs"]
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.264703 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.288443 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.523239 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f016-account-create-srmqs" event={"ID":"5d5b014b-6225-45cf-afcd-4263a6347f2d","Type":"ContainerStarted","Data":"b9539864be770775291ff8269b1346b19b84c9725e72f3f24bf88870112c328d"}
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.575304 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-a71a-account-create-2d57l"]
Sep 29 14:05:31 crc kubenswrapper[4634]: I0929 14:05:31.861434 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-d98c-account-create-2f7ww"]
Sep 29 14:05:31 crc kubenswrapper[4634]: W0929 14:05:31.870331 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e65b300_4b51_45f3_8a31_9a95755cb81a.slice/crio-5668e5e2bf634476d2782af1c39e650680c913d663494d4a2b928fd4312a2cb6 WatchSource:0}: Error finding container 5668e5e2bf634476d2782af1c39e650680c913d663494d4a2b928fd4312a2cb6: Status 404 returned error can't find the container with id 5668e5e2bf634476d2782af1c39e650680c913d663494d4a2b928fd4312a2cb6
Sep 29 14:05:32 crc kubenswrapper[4634]: I0929 14:05:32.151633 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e14db37b-1e3b-4faf-b73d-339e69007677" path="/var/lib/kubelet/pods/e14db37b-1e3b-4faf-b73d-339e69007677/volumes"
Sep 29 14:05:32 crc kubenswrapper[4634]: I0929 14:05:32.493464 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:05:32 crc kubenswrapper[4634]: I0929 14:05:32.544883 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-a71a-account-create-2d57l" event={"ID":"575fd438-a63c-4fe1-8bc7-e8d293176ec0","Type":"ContainerStarted","Data":"d6777d4c33d970e3caef177cf31fd51a89f0a622e6838ce1ac077ba7beef268f"}
Sep 29 14:05:32 crc kubenswrapper[4634]: I0929 14:05:32.546428 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerStarted","Data":"efec551314a670a9d66030c188d947e7996b63b01307d0a4825ae87b87645af9"}
Sep 29 14:05:32 crc kubenswrapper[4634]: I0929 14:05:32.548050 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d98c-account-create-2f7ww" event={"ID":"2e65b300-4b51-45f3-8a31-9a95755cb81a","Type":"ContainerStarted","Data":"5668e5e2bf634476d2782af1c39e650680c913d663494d4a2b928fd4312a2cb6"}
Sep 29 14:05:33 crc kubenswrapper[4634]: I0929 14:05:33.488239 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:33 crc kubenswrapper[4634]: I0929 14:05:33.488651 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:33 crc kubenswrapper[4634]: I0929 14:05:33.530835 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:33 crc kubenswrapper[4634]: I0929 14:05:33.532219 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:33 crc kubenswrapper[4634]: I0929 14:05:33.568609 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:33 crc kubenswrapper[4634]: I0929 14:05:33.568660 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:34 crc kubenswrapper[4634]: I0929 14:05:34.866978 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.168:8776/healthcheck\": dial tcp 10.217.0.168:8776: connect: connection refused"
Sep 29 14:05:35 crc kubenswrapper[4634]: I0929 14:05:35.792524 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:35 crc kubenswrapper[4634]: I0929 14:05:35.793618 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:35 crc kubenswrapper[4634]: I0929 14:05:35.843387 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:35 crc kubenswrapper[4634]: I0929 14:05:35.848288 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:36 crc kubenswrapper[4634]: I0929 14:05:36.599278 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:36 crc kubenswrapper[4634]: I0929 14:05:36.599330 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:38 crc kubenswrapper[4634]: I0929 14:05:38.596428 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-scheduler-0" podUID="03850223-163a-4eca-a290-1d072a2b535d" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.173:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 14:05:38 crc kubenswrapper[4634]: I0929 14:05:38.711566 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-rq5pg" podUID="e6c834dc-3418-4d52-ade3-02c1043d6360" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.87:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 14:05:39 crc kubenswrapper[4634]: I0929 14:05:39.867588 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.168:8776/healthcheck\": dial tcp 10.217.0.168:8776: connect: connection refused"
Sep 29 14:05:42 crc kubenswrapper[4634]: I0929 14:05:42.362446 4634 generic.go:334] "Generic (PLEG): container finished" podID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerID="45005b4b189951ad9dbb4e11d7957914c56cddd75261b7d2f7cf763bbc1a150d" exitCode=-1
Sep 29 14:05:42 crc kubenswrapper[4634]: I0929 14:05:42.364453 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22eacd73-72e4-43db-8c89-6a380b51a08e","Type":"ContainerDied","Data":"45005b4b189951ad9dbb4e11d7957914c56cddd75261b7d2f7cf763bbc1a150d"}
Sep 29 14:05:43 crc kubenswrapper[4634]: I0929 14:05:43.639332 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-scheduler-0" podUID="03850223-163a-4eca-a290-1d072a2b535d" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.173:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 29 14:05:44 crc kubenswrapper[4634]: I0929 14:05:44.455280 4634 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 3.459779996s: [/var/lib/containers/storage/overlay/bd3be01047e989d7448d33f9733e33c0bb103a4cd43d1b2d9847bceb38e8da86/diff /var/log/pods/openstack_placement-5bfb7db698-tmn8x_2b38f115-526d-4093-b79c-19e6b9258dbf/placement-api/0.log]; will not log again for this container unless duration exceeds 2s
Sep 29 14:05:44 crc kubenswrapper[4634]: I0929 14:05:44.867444 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.168:8776/healthcheck\": dial tcp 10.217.0.168:8776: connect: connection refused"
Sep 29 14:05:44 crc kubenswrapper[4634]: I0929 14:05:44.867606 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Sep 29 14:05:45 crc kubenswrapper[4634]: I0929 14:05:45.405146 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f016-account-create-srmqs" event={"ID":"5d5b014b-6225-45cf-afcd-4263a6347f2d","Type":"ContainerStarted","Data":"41f071213ddd2359d56c2bcec7b71b96726b49ba592d01abba927ba746c1f3d6"}
Sep 29 14:05:45 crc kubenswrapper[4634]: I0929 14:05:45.406836 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-a71a-account-create-2d57l" event={"ID":"575fd438-a63c-4fe1-8bc7-e8d293176ec0","Type":"ContainerStarted","Data":"95561b3d9bf0df8cd81d8da175a9bf638c826d122eca76f548f5467310daf6e4"}
Sep 29 14:05:45 crc kubenswrapper[4634]: I0929 14:05:45.408315 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d98c-account-create-2f7ww" event={"ID":"2e65b300-4b51-45f3-8a31-9a95755cb81a","Type":"ContainerStarted","Data":"d7559e56cdd92910bba890d7d2fa3853a747697534885ed0b940e3a95bdf738b"}
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.032909 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.184593 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data-custom\") pod \"22eacd73-72e4-43db-8c89-6a380b51a08e\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") "
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.184724 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22eacd73-72e4-43db-8c89-6a380b51a08e-etc-machine-id\") pod \"22eacd73-72e4-43db-8c89-6a380b51a08e\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") "
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.184904 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-combined-ca-bundle\") pod \"22eacd73-72e4-43db-8c89-6a380b51a08e\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") "
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.184958 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-scripts\") pod \"22eacd73-72e4-43db-8c89-6a380b51a08e\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") "
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.185027 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data\") pod \"22eacd73-72e4-43db-8c89-6a380b51a08e\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") "
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.185101 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2mh7\" (UniqueName: \"kubernetes.io/projected/22eacd73-72e4-43db-8c89-6a380b51a08e-kube-api-access-q2mh7\") pod \"22eacd73-72e4-43db-8c89-6a380b51a08e\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") "
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.185145 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22eacd73-72e4-43db-8c89-6a380b51a08e-logs\") pod \"22eacd73-72e4-43db-8c89-6a380b51a08e\" (UID: \"22eacd73-72e4-43db-8c89-6a380b51a08e\") "
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.186364 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/22eacd73-72e4-43db-8c89-6a380b51a08e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "22eacd73-72e4-43db-8c89-6a380b51a08e" (UID: "22eacd73-72e4-43db-8c89-6a380b51a08e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.187394 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22eacd73-72e4-43db-8c89-6a380b51a08e-logs" (OuterVolumeSpecName: "logs") pod "22eacd73-72e4-43db-8c89-6a380b51a08e" (UID: "22eacd73-72e4-43db-8c89-6a380b51a08e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.198367 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "22eacd73-72e4-43db-8c89-6a380b51a08e" (UID: "22eacd73-72e4-43db-8c89-6a380b51a08e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.202175 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22eacd73-72e4-43db-8c89-6a380b51a08e-kube-api-access-q2mh7" (OuterVolumeSpecName: "kube-api-access-q2mh7") pod "22eacd73-72e4-43db-8c89-6a380b51a08e" (UID: "22eacd73-72e4-43db-8c89-6a380b51a08e"). InnerVolumeSpecName "kube-api-access-q2mh7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.203545 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-scripts" (OuterVolumeSpecName: "scripts") pod "22eacd73-72e4-43db-8c89-6a380b51a08e" (UID: "22eacd73-72e4-43db-8c89-6a380b51a08e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.252518 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data" (OuterVolumeSpecName: "config-data") pod "22eacd73-72e4-43db-8c89-6a380b51a08e" (UID: "22eacd73-72e4-43db-8c89-6a380b51a08e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.252985 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "22eacd73-72e4-43db-8c89-6a380b51a08e" (UID: "22eacd73-72e4-43db-8c89-6a380b51a08e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.287485 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.287530 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.287708 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.287718 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2mh7\" (UniqueName: \"kubernetes.io/projected/22eacd73-72e4-43db-8c89-6a380b51a08e-kube-api-access-q2mh7\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.287734 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/22eacd73-72e4-43db-8c89-6a380b51a08e-logs\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.287744 4634 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/22eacd73-72e4-43db-8c89-6a380b51a08e-config-data-custom\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.287753 4634 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/22eacd73-72e4-43db-8c89-6a380b51a08e-etc-machine-id\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.420532 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.420504 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"22eacd73-72e4-43db-8c89-6a380b51a08e","Type":"ContainerDied","Data":"73bdb45fd062054a9f04019c1561a29f2cf1506437b9e9dba0ed387974b13e11"}
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.420782 4634 scope.go:117] "RemoveContainer" containerID="45005b4b189951ad9dbb4e11d7957914c56cddd75261b7d2f7cf763bbc1a150d"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.446320 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-d98c-account-create-2f7ww" podStartSLOduration=16.446295338 podStartE2EDuration="16.446295338s" podCreationTimestamp="2025-09-29 14:05:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:46.442969468 +0000 UTC m=+1277.011697217" watchObservedRunningTime="2025-09-29 14:05:46.446295338 +0000 UTC m=+1277.015023087"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.471393 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-f016-account-create-srmqs" podStartSLOduration=17.471369033 podStartE2EDuration="17.471369033s" podCreationTimestamp="2025-09-29 14:05:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:46.468990619 +0000 UTC m=+1277.037718388" watchObservedRunningTime="2025-09-29 14:05:46.471369033 +0000 UTC m=+1277.040096782"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.498471 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-a71a-account-create-2d57l" podStartSLOduration=16.498448003 podStartE2EDuration="16.498448003s" podCreationTimestamp="2025-09-29 14:05:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:46.493436488 +0000 UTC m=+1277.062164237" watchObservedRunningTime="2025-09-29 14:05:46.498448003 +0000 UTC m=+1277.067175752"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.524620 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.532119 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.558041 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Sep 29 14:05:46 crc kubenswrapper[4634]: E0929 14:05:46.558986 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.559103 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api"
Sep 29 14:05:46 crc kubenswrapper[4634]: E0929 14:05:46.559230 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api-log"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.559304 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api-log"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.559589 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api-log"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.559686 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" containerName="cinder-api"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.561284 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.568665 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.568848 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.568875 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.575350 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.697883 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-scripts\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698013 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6n8l\" (UniqueName: \"kubernetes.io/projected/8739a6eb-884d-49c7-8ff9-e44b56575552-kube-api-access-n6n8l\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698072 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-config-data\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698166 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698191 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698241 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8739a6eb-884d-49c7-8ff9-e44b56575552-logs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698327 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-config-data-custom\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698345 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.698364 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8739a6eb-884d-49c7-8ff9-e44b56575552-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.794999 4634 scope.go:117] "RemoveContainer" containerID="6e0b5c89c3a5dd3a5b1a943b111b5fa740c956fc698e2f48d3abbb617e765949"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.800736 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.800808 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-config-data-custom\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.800856 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8739a6eb-884d-49c7-8ff9-e44b56575552-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.800925 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-scripts\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.801005 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6n8l\" (UniqueName: \"kubernetes.io/projected/8739a6eb-884d-49c7-8ff9-e44b56575552-kube-api-access-n6n8l\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.801069 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-config-data\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.801180 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.801239 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.801348 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8739a6eb-884d-49c7-8ff9-e44b56575552-logs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.802009 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8739a6eb-884d-49c7-8ff9-e44b56575552-logs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.803506 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8739a6eb-884d-49c7-8ff9-e44b56575552-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.821061 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-config-data\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.822196 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-config-data-custom\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.826141 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.827910 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-scripts\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.830393 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.835308 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6n8l\" (UniqueName: \"kubernetes.io/projected/8739a6eb-884d-49c7-8ff9-e44b56575552-kube-api-access-n6n8l\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.841464 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8739a6eb-884d-49c7-8ff9-e44b56575552-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8739a6eb-884d-49c7-8ff9-e44b56575552\") " pod="openstack/cinder-api-0"
Sep 29 14:05:46 crc kubenswrapper[4634]: I0929 14:05:46.881703 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Sep 29 14:05:48 crc kubenswrapper[4634]: I0929 14:05:48.126431 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22eacd73-72e4-43db-8c89-6a380b51a08e" path="/var/lib/kubelet/pods/22eacd73-72e4-43db-8c89-6a380b51a08e/volumes"
Sep 29 14:05:48 crc kubenswrapper[4634]: I0929 14:05:48.447590 4634 generic.go:334] "Generic (PLEG): container finished" podID="575fd438-a63c-4fe1-8bc7-e8d293176ec0" containerID="95561b3d9bf0df8cd81d8da175a9bf638c826d122eca76f548f5467310daf6e4" exitCode=0
Sep 29 14:05:48 crc kubenswrapper[4634]: I0929 14:05:48.447636 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-a71a-account-create-2d57l" event={"ID":"575fd438-a63c-4fe1-8bc7-e8d293176ec0","Type":"ContainerDied","Data":"95561b3d9bf0df8cd81d8da175a9bf638c826d122eca76f548f5467310daf6e4"}
Sep 29 14:05:49 crc kubenswrapper[4634]: I0929 14:05:49.011104 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:49 crc kubenswrapper[4634]: I0929 14:05:49.011742 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 14:05:49 crc kubenswrapper[4634]: I0929 14:05:49.118282 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:49 crc kubenswrapper[4634]: I0929 14:05:49.118825 4634 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 14:05:49 crc kubenswrapper[4634]: I0929 14:05:49.281274 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 14:05:49 crc kubenswrapper[4634]: I0929 14:05:49.285623 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 14:05:49 crc kubenswrapper[4634]: I0929 14:05:49.569539 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.266770 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-a71a-account-create-2d57l"
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.333748 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gfnh\" (UniqueName: \"kubernetes.io/projected/575fd438-a63c-4fe1-8bc7-e8d293176ec0-kube-api-access-7gfnh\") pod \"575fd438-a63c-4fe1-8bc7-e8d293176ec0\" (UID: \"575fd438-a63c-4fe1-8bc7-e8d293176ec0\") "
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.345654 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/575fd438-a63c-4fe1-8bc7-e8d293176ec0-kube-api-access-7gfnh" (OuterVolumeSpecName: "kube-api-access-7gfnh") pod "575fd438-a63c-4fe1-8bc7-e8d293176ec0" (UID: "575fd438-a63c-4fe1-8bc7-e8d293176ec0"). InnerVolumeSpecName "kube-api-access-7gfnh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.436340 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gfnh\" (UniqueName: \"kubernetes.io/projected/575fd438-a63c-4fe1-8bc7-e8d293176ec0-kube-api-access-7gfnh\") on node \"crc\" DevicePath \"\""
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.478995 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerStarted","Data":"4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf"}
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.486488 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8739a6eb-884d-49c7-8ff9-e44b56575552","Type":"ContainerStarted","Data":"9f467de33798aa9412f9b8d89d4cbded1c540afcf1557c40a53e8ff49162bd0d"}
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.490890 4634 generic.go:334] "Generic (PLEG): container finished" podID="2e65b300-4b51-45f3-8a31-9a95755cb81a" containerID="d7559e56cdd92910bba890d7d2fa3853a747697534885ed0b940e3a95bdf738b" exitCode=0
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.490962 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d98c-account-create-2f7ww" event={"ID":"2e65b300-4b51-45f3-8a31-9a95755cb81a","Type":"ContainerDied","Data":"d7559e56cdd92910bba890d7d2fa3853a747697534885ed0b940e3a95bdf738b"}
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.500448 4634 generic.go:334] "Generic (PLEG): container finished" podID="5d5b014b-6225-45cf-afcd-4263a6347f2d" containerID="41f071213ddd2359d56c2bcec7b71b96726b49ba592d01abba927ba746c1f3d6" exitCode=0
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.500534 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f016-account-create-srmqs" event={"ID":"5d5b014b-6225-45cf-afcd-4263a6347f2d","Type":"ContainerDied","Data":"41f071213ddd2359d56c2bcec7b71b96726b49ba592d01abba927ba746c1f3d6"}
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.506510 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-a71a-account-create-2d57l" event={"ID":"575fd438-a63c-4fe1-8bc7-e8d293176ec0","Type":"ContainerDied","Data":"d6777d4c33d970e3caef177cf31fd51a89f0a622e6838ce1ac077ba7beef268f"}
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.506565 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6777d4c33d970e3caef177cf31fd51a89f0a622e6838ce1ac077ba7beef268f"
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.506659 4634 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-cell1-a71a-account-create-2d57l"
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.512526 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"041479d7-0e40-4b0c-b301-f79c133394dc","Type":"ContainerStarted","Data":"330ed70fec9ce2422bb5a0f46474623216722088ed5f10dc412f97166822aac8"}
Sep 29 14:05:50 crc kubenswrapper[4634]: I0929 14:05:50.589919 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.09827694 podStartE2EDuration="1m0.589862323s" podCreationTimestamp="2025-09-29 14:04:50 +0000 UTC" firstStartedPulling="2025-09-29 14:04:51.302615896 +0000 UTC m=+1221.871343645" lastFinishedPulling="2025-09-29 14:05:48.794201279 +0000 UTC m=+1279.362929028" observedRunningTime="2025-09-29 14:05:50.554354297 +0000 UTC m=+1281.123082046" watchObservedRunningTime="2025-09-29 14:05:50.589862323 +0000 UTC m=+1281.158590072"
Sep 29 14:05:51 crc kubenswrapper[4634]: I0929 14:05:51.537018 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8739a6eb-884d-49c7-8ff9-e44b56575552","Type":"ContainerStarted","Data":"801f8ac035b5748824c8cc88ada717f85249c68b59e716e37b0a5f9dbf03a4cb"}
Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.102144 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-f016-account-create-srmqs"
Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.142069 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.174625 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d98c-account-create-2f7ww"
Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.218476 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcrr9\" (UniqueName: \"kubernetes.io/projected/5d5b014b-6225-45cf-afcd-4263a6347f2d-kube-api-access-jcrr9\") pod \"5d5b014b-6225-45cf-afcd-4263a6347f2d\" (UID: \"5d5b014b-6225-45cf-afcd-4263a6347f2d\") "
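[editor's note] The pod_startup_latency_tracker entry above for openstack/openstackclient encodes a simple relationship: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that E2E figure minus the image-pull window (lastFinishedPulling - firstStartedPulling), since pull time is excluded from the startup SLI. A minimal Go sketch follows; it is not kubelet's actual tracker code, it only reproduces the arithmetic from the wall-clock timestamps logged above:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching Go's default time.Time formatting used in the log line.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	mustParse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}

	created := mustParse("2025-09-29 14:04:50 +0000 UTC")             // podCreationTimestamp
	firstPull := mustParse("2025-09-29 14:04:51.302615896 +0000 UTC") // firstStartedPulling
	lastPull := mustParse("2025-09-29 14:05:48.794201279 +0000 UTC")  // lastFinishedPulling
	running := mustParse("2025-09-29 14:05:50.589862323 +0000 UTC")   // watchObservedRunningTime

	e2e := running.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: E2E minus the image-pull window
	fmt.Println(e2e, slo)                // prints: 1m0.589862323s 3.09827694s
}

Running this prints 1m0.589862323s and 3.09827694s, matching the logged values exactly; kubelet itself computes the durations from monotonic clock readings (the m=+... suffixes in the log), which is why other entries of this kind can differ from a wall-clock recomputation at nanosecond scale.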
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.328226 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnnvb\" (UniqueName: \"kubernetes.io/projected/2e65b300-4b51-45f3-8a31-9a95755cb81a-kube-api-access-jnnvb\") pod \"2e65b300-4b51-45f3-8a31-9a95755cb81a\" (UID: \"2e65b300-4b51-45f3-8a31-9a95755cb81a\") " Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.329845 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcrr9\" (UniqueName: \"kubernetes.io/projected/5d5b014b-6225-45cf-afcd-4263a6347f2d-kube-api-access-jcrr9\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.345336 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e65b300-4b51-45f3-8a31-9a95755cb81a-kube-api-access-jnnvb" (OuterVolumeSpecName: "kube-api-access-jnnvb") pod "2e65b300-4b51-45f3-8a31-9a95755cb81a" (UID: "2e65b300-4b51-45f3-8a31-9a95755cb81a"). InnerVolumeSpecName "kube-api-access-jnnvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.431549 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnnvb\" (UniqueName: \"kubernetes.io/projected/2e65b300-4b51-45f3-8a31-9a95755cb81a-kube-api-access-jnnvb\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.593399 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerStarted","Data":"19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea"} Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.610218 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-d98c-account-create-2f7ww" event={"ID":"2e65b300-4b51-45f3-8a31-9a95755cb81a","Type":"ContainerDied","Data":"5668e5e2bf634476d2782af1c39e650680c913d663494d4a2b928fd4312a2cb6"} Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.610508 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5668e5e2bf634476d2782af1c39e650680c913d663494d4a2b928fd4312a2cb6" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.610651 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-d98c-account-create-2f7ww" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.624348 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-f016-account-create-srmqs" event={"ID":"5d5b014b-6225-45cf-afcd-4263a6347f2d","Type":"ContainerDied","Data":"b9539864be770775291ff8269b1346b19b84c9725e72f3f24bf88870112c328d"} Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.624411 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9539864be770775291ff8269b1346b19b84c9725e72f3f24bf88870112c328d" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.624508 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-f016-account-create-srmqs" Sep 29 14:05:52 crc kubenswrapper[4634]: I0929 14:05:52.905813 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:05:53 crc kubenswrapper[4634]: I0929 14:05:53.638005 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8739a6eb-884d-49c7-8ff9-e44b56575552","Type":"ContainerStarted","Data":"f4e345b1a2a64bca4cd92d52ffc4d37f3e10af367193b329b98217adefab91d1"} Sep 29 14:05:53 crc kubenswrapper[4634]: I0929 14:05:53.638680 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 14:05:53 crc kubenswrapper[4634]: I0929 14:05:53.661860 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerStarted","Data":"e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4"} Sep 29 14:05:53 crc kubenswrapper[4634]: I0929 14:05:53.674334 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=7.674299766 podStartE2EDuration="7.674299766s" podCreationTimestamp="2025-09-29 14:05:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:05:53.667935515 +0000 UTC m=+1284.236663254" watchObservedRunningTime="2025-09-29 14:05:53.674299766 +0000 UTC m=+1284.243027535" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.622717 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjgff"] Sep 29 14:05:55 crc kubenswrapper[4634]: E0929 14:05:55.624057 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e65b300-4b51-45f3-8a31-9a95755cb81a" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.624073 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e65b300-4b51-45f3-8a31-9a95755cb81a" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: E0929 14:05:55.624108 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d5b014b-6225-45cf-afcd-4263a6347f2d" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.624115 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d5b014b-6225-45cf-afcd-4263a6347f2d" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: E0929 14:05:55.624140 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="575fd438-a63c-4fe1-8bc7-e8d293176ec0" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.624148 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="575fd438-a63c-4fe1-8bc7-e8d293176ec0" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.624359 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="575fd438-a63c-4fe1-8bc7-e8d293176ec0" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.624371 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d5b014b-6225-45cf-afcd-4263a6347f2d" containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.624393 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e65b300-4b51-45f3-8a31-9a95755cb81a" 
containerName="mariadb-account-create" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.625110 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.628577 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.628842 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-w4clr" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.628972 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.644887 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjgff"] Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.696034 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerStarted","Data":"9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950"} Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.696285 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-central-agent" containerID="cri-o://4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf" gracePeriod=30 Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.696373 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.696544 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="proxy-httpd" containerID="cri-o://9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950" gracePeriod=30 Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.696693 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-notification-agent" containerID="cri-o://19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea" gracePeriod=30 Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.696714 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="sg-core" containerID="cri-o://e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4" gracePeriod=30 Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.713833 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wkd5\" (UniqueName: \"kubernetes.io/projected/dec8206e-efde-472c-9342-1ac2e0913508-kube-api-access-9wkd5\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.713958 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-scripts\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: 
\"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.713998 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-config-data\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.714038 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.816197 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-config-data\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.816316 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.816431 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wkd5\" (UniqueName: \"kubernetes.io/projected/dec8206e-efde-472c-9342-1ac2e0913508-kube-api-access-9wkd5\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.816513 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-scripts\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.826231 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.838781 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-scripts\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.838848 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-config-data\") pod 
\"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.847640 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wkd5\" (UniqueName: \"kubernetes.io/projected/dec8206e-efde-472c-9342-1ac2e0913508-kube-api-access-9wkd5\") pod \"nova-cell0-conductor-db-sync-gjgff\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:55 crc kubenswrapper[4634]: I0929 14:05:55.944940 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.569352 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.58458688 podStartE2EDuration="26.569333068s" podCreationTimestamp="2025-09-29 14:05:30 +0000 UTC" firstStartedPulling="2025-09-29 14:05:32.512779946 +0000 UTC m=+1263.081507695" lastFinishedPulling="2025-09-29 14:05:54.497526134 +0000 UTC m=+1285.066253883" observedRunningTime="2025-09-29 14:05:55.735296009 +0000 UTC m=+1286.304023758" watchObservedRunningTime="2025-09-29 14:05:56.569333068 +0000 UTC m=+1287.138060807" Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.573652 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjgff"] Sep 29 14:05:56 crc kubenswrapper[4634]: W0929 14:05:56.579381 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddec8206e_efde_472c_9342_1ac2e0913508.slice/crio-0cf7015a65fc3f931b399daddb0f5ec2611e8fe9508740701f24b5e13f0c5574 WatchSource:0}: Error finding container 0cf7015a65fc3f931b399daddb0f5ec2611e8fe9508740701f24b5e13f0c5574: Status 404 returned error can't find the container with id 0cf7015a65fc3f931b399daddb0f5ec2611e8fe9508740701f24b5e13f0c5574 Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.709424 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjgff" event={"ID":"dec8206e-efde-472c-9342-1ac2e0913508","Type":"ContainerStarted","Data":"0cf7015a65fc3f931b399daddb0f5ec2611e8fe9508740701f24b5e13f0c5574"} Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.714256 4634 generic.go:334] "Generic (PLEG): container finished" podID="7156e190-1481-46e7-82fc-98c910372ba5" containerID="9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950" exitCode=0 Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.714296 4634 generic.go:334] "Generic (PLEG): container finished" podID="7156e190-1481-46e7-82fc-98c910372ba5" containerID="e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4" exitCode=2 Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.714304 4634 generic.go:334] "Generic (PLEG): container finished" podID="7156e190-1481-46e7-82fc-98c910372ba5" containerID="19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea" exitCode=0 Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.714335 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerDied","Data":"9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950"} Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.714394 4634 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerDied","Data":"e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4"} Sep 29 14:05:56 crc kubenswrapper[4634]: I0929 14:05:56.714404 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerDied","Data":"19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea"} Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.186839 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.293840 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-sg-core-conf-yaml\") pod \"7156e190-1481-46e7-82fc-98c910372ba5\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.294071 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-run-httpd\") pod \"7156e190-1481-46e7-82fc-98c910372ba5\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.294118 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-scripts\") pod \"7156e190-1481-46e7-82fc-98c910372ba5\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.294190 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-log-httpd\") pod \"7156e190-1481-46e7-82fc-98c910372ba5\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.294386 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-combined-ca-bundle\") pod \"7156e190-1481-46e7-82fc-98c910372ba5\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.294414 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-config-data\") pod \"7156e190-1481-46e7-82fc-98c910372ba5\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.294475 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z46v\" (UniqueName: \"kubernetes.io/projected/7156e190-1481-46e7-82fc-98c910372ba5-kube-api-access-7z46v\") pod \"7156e190-1481-46e7-82fc-98c910372ba5\" (UID: \"7156e190-1481-46e7-82fc-98c910372ba5\") " Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.294957 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7156e190-1481-46e7-82fc-98c910372ba5" (UID: "7156e190-1481-46e7-82fc-98c910372ba5"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.295306 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7156e190-1481-46e7-82fc-98c910372ba5" (UID: "7156e190-1481-46e7-82fc-98c910372ba5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.306324 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7156e190-1481-46e7-82fc-98c910372ba5-kube-api-access-7z46v" (OuterVolumeSpecName: "kube-api-access-7z46v") pod "7156e190-1481-46e7-82fc-98c910372ba5" (UID: "7156e190-1481-46e7-82fc-98c910372ba5"). InnerVolumeSpecName "kube-api-access-7z46v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.307069 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-scripts" (OuterVolumeSpecName: "scripts") pod "7156e190-1481-46e7-82fc-98c910372ba5" (UID: "7156e190-1481-46e7-82fc-98c910372ba5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.391902 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7156e190-1481-46e7-82fc-98c910372ba5" (UID: "7156e190-1481-46e7-82fc-98c910372ba5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.399950 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z46v\" (UniqueName: \"kubernetes.io/projected/7156e190-1481-46e7-82fc-98c910372ba5-kube-api-access-7z46v\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.399986 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.400000 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.400009 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.400019 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7156e190-1481-46e7-82fc-98c910372ba5-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.446232 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7156e190-1481-46e7-82fc-98c910372ba5" (UID: "7156e190-1481-46e7-82fc-98c910372ba5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.472477 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-config-data" (OuterVolumeSpecName: "config-data") pod "7156e190-1481-46e7-82fc-98c910372ba5" (UID: "7156e190-1481-46e7-82fc-98c910372ba5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.502162 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.502208 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7156e190-1481-46e7-82fc-98c910372ba5-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.747197 4634 generic.go:334] "Generic (PLEG): container finished" podID="7156e190-1481-46e7-82fc-98c910372ba5" containerID="4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf" exitCode=0 Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.747251 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerDied","Data":"4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf"} Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.747285 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7156e190-1481-46e7-82fc-98c910372ba5","Type":"ContainerDied","Data":"efec551314a670a9d66030c188d947e7996b63b01307d0a4825ae87b87645af9"} Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.747307 4634 scope.go:117] "RemoveContainer" containerID="9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.747458 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.799398 4634 scope.go:117] "RemoveContainer" containerID="e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.806054 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.832219 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.848768 4634 scope.go:117] "RemoveContainer" containerID="19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.852659 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 14:05:59.853228 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-notification-agent" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853254 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-notification-agent" Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 14:05:59.853285 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="sg-core" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853295 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="sg-core" Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 14:05:59.853313 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="proxy-httpd" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853321 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="proxy-httpd" Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 14:05:59.853346 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-central-agent" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853356 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-central-agent" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853581 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="proxy-httpd" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853618 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="sg-core" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853634 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-notification-agent" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.853653 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="7156e190-1481-46e7-82fc-98c910372ba5" containerName="ceilometer-central-agent" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.856190 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.860104 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.861864 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.862222 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.915745 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-scripts\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.915825 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-run-httpd\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.915880 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.915906 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-config-data\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.915936 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fv4t\" (UniqueName: \"kubernetes.io/projected/5841d425-dec2-437b-9b99-f52212dfb5fe-kube-api-access-7fv4t\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.915960 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.915977 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-log-httpd\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.946074 4634 scope.go:117] "RemoveContainer" containerID="4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.974552 4634 scope.go:117] "RemoveContainer" containerID="9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950" Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 
14:05:59.975322 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950\": container with ID starting with 9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950 not found: ID does not exist" containerID="9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.975356 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950"} err="failed to get container status \"9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950\": rpc error: code = NotFound desc = could not find container \"9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950\": container with ID starting with 9c72a62b62446c5148626a1e57ab25e86ea0e7d5d336e574f4479b5b1e3c2950 not found: ID does not exist" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.975379 4634 scope.go:117] "RemoveContainer" containerID="e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4" Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 14:05:59.977773 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4\": container with ID starting with e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4 not found: ID does not exist" containerID="e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.977796 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4"} err="failed to get container status \"e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4\": rpc error: code = NotFound desc = could not find container \"e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4\": container with ID starting with e003f489310fcb4274935297a0a8806e68b9f90fca1007352dfe2527b8bf2ae4 not found: ID does not exist" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.977809 4634 scope.go:117] "RemoveContainer" containerID="19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea" Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 14:05:59.981717 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea\": container with ID starting with 19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea not found: ID does not exist" containerID="19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.981748 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea"} err="failed to get container status \"19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea\": rpc error: code = NotFound desc = could not find container \"19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea\": container with ID starting with 19c3c1564348025765f315562588ffcf662519216257850e1bfdb77b99d8bbea not found: ID does not exist" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.981769 4634 
scope.go:117] "RemoveContainer" containerID="4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf" Sep 29 14:05:59 crc kubenswrapper[4634]: E0929 14:05:59.982230 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf\": container with ID starting with 4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf not found: ID does not exist" containerID="4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf" Sep 29 14:05:59 crc kubenswrapper[4634]: I0929 14:05:59.982251 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf"} err="failed to get container status \"4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf\": rpc error: code = NotFound desc = could not find container \"4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf\": container with ID starting with 4552779fffbd853072928f56d93f78a6e75f06b5f9abc5c8202c6f2d407322cf not found: ID does not exist" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.017519 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.017570 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-config-data\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.017602 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fv4t\" (UniqueName: \"kubernetes.io/projected/5841d425-dec2-437b-9b99-f52212dfb5fe-kube-api-access-7fv4t\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.017627 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.017647 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-log-httpd\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.017706 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-scripts\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.017751 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-run-httpd\") pod 
\"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.018491 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-log-httpd\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.018569 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-run-httpd\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.025541 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-config-data\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.028827 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.034499 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.040161 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-scripts\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.044641 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fv4t\" (UniqueName: \"kubernetes.io/projected/5841d425-dec2-437b-9b99-f52212dfb5fe-kube-api-access-7fv4t\") pod \"ceilometer-0\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.130959 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7156e190-1481-46e7-82fc-98c910372ba5" path="/var/lib/kubelet/pods/7156e190-1481-46e7-82fc-98c910372ba5/volumes" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.227802 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:00 crc kubenswrapper[4634]: I0929 14:06:00.841021 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:01 crc kubenswrapper[4634]: I0929 14:06:01.800390 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerStarted","Data":"f1ee56bd57708828a09a59cf42c9e7aa598189cd75cdfc196b66f1364de5e4b7"} Sep 29 14:06:04 crc kubenswrapper[4634]: I0929 14:06:04.493632 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 14:06:08 crc kubenswrapper[4634]: I0929 14:06:08.456063 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:09 crc kubenswrapper[4634]: I0929 14:06:09.893819 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjgff" event={"ID":"dec8206e-efde-472c-9342-1ac2e0913508","Type":"ContainerStarted","Data":"30f71aa55e79d407cb936964a158516f3fa28f5592ee3f678476206d3325268e"} Sep 29 14:06:09 crc kubenswrapper[4634]: I0929 14:06:09.897515 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerStarted","Data":"e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764"} Sep 29 14:06:09 crc kubenswrapper[4634]: I0929 14:06:09.897671 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerStarted","Data":"b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d"} Sep 29 14:06:09 crc kubenswrapper[4634]: I0929 14:06:09.915235 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-gjgff" podStartSLOduration=2.883244075 podStartE2EDuration="14.915211878s" podCreationTimestamp="2025-09-29 14:05:55 +0000 UTC" firstStartedPulling="2025-09-29 14:05:56.582260445 +0000 UTC m=+1287.150988204" lastFinishedPulling="2025-09-29 14:06:08.614228258 +0000 UTC m=+1299.182956007" observedRunningTime="2025-09-29 14:06:09.906999773 +0000 UTC m=+1300.475727522" watchObservedRunningTime="2025-09-29 14:06:09.915211878 +0000 UTC m=+1300.483939627" Sep 29 14:06:10 crc kubenswrapper[4634]: I0929 14:06:10.915346 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerStarted","Data":"9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9"} Sep 29 14:06:13 crc kubenswrapper[4634]: I0929 14:06:13.946046 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerStarted","Data":"cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96"} Sep 29 14:06:13 crc kubenswrapper[4634]: I0929 14:06:13.946326 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="sg-core" containerID="cri-o://9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9" gracePeriod=30 Sep 29 14:06:13 crc kubenswrapper[4634]: I0929 14:06:13.946333 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="proxy-httpd" 
containerID="cri-o://cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96" gracePeriod=30 Sep 29 14:06:13 crc kubenswrapper[4634]: I0929 14:06:13.946316 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="ceilometer-central-agent" containerID="cri-o://b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d" gracePeriod=30 Sep 29 14:06:13 crc kubenswrapper[4634]: I0929 14:06:13.946408 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="ceilometer-notification-agent" containerID="cri-o://e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764" gracePeriod=30 Sep 29 14:06:13 crc kubenswrapper[4634]: I0929 14:06:13.954985 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:06:13 crc kubenswrapper[4634]: I0929 14:06:13.985394 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.147790212 podStartE2EDuration="14.985368979s" podCreationTimestamp="2025-09-29 14:05:59 +0000 UTC" firstStartedPulling="2025-09-29 14:06:00.860983643 +0000 UTC m=+1291.429711392" lastFinishedPulling="2025-09-29 14:06:12.69856241 +0000 UTC m=+1303.267290159" observedRunningTime="2025-09-29 14:06:13.976467406 +0000 UTC m=+1304.545195155" watchObservedRunningTime="2025-09-29 14:06:13.985368979 +0000 UTC m=+1304.554096728" Sep 29 14:06:14 crc kubenswrapper[4634]: E0929 14:06:14.262681 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5841d425_dec2_437b_9b99_f52212dfb5fe.slice/crio-9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5841d425_dec2_437b_9b99_f52212dfb5fe.slice/crio-cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96.scope\": RecentStats: unable to find data in memory cache]" Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 14:06:14.398891 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 14:06:14.398996 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 14:06:14.975976 4634 generic.go:334] "Generic (PLEG): container finished" podID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerID="cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96" exitCode=0 Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 14:06:14.976047 4634 generic.go:334] "Generic (PLEG): container finished" podID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerID="9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9" exitCode=2 Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 
14:06:14.976059 4634 generic.go:334] "Generic (PLEG): container finished" podID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerID="e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764" exitCode=0 Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 14:06:14.976058 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerDied","Data":"cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96"} Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 14:06:14.976133 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerDied","Data":"9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9"} Sep 29 14:06:14 crc kubenswrapper[4634]: I0929 14:06:14.976145 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerDied","Data":"e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764"} Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.715579 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.758534 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-config-data\") pod \"5841d425-dec2-437b-9b99-f52212dfb5fe\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.758598 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-sg-core-conf-yaml\") pod \"5841d425-dec2-437b-9b99-f52212dfb5fe\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.758769 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fv4t\" (UniqueName: \"kubernetes.io/projected/5841d425-dec2-437b-9b99-f52212dfb5fe-kube-api-access-7fv4t\") pod \"5841d425-dec2-437b-9b99-f52212dfb5fe\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.758863 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-log-httpd\") pod \"5841d425-dec2-437b-9b99-f52212dfb5fe\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.758918 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-run-httpd\") pod \"5841d425-dec2-437b-9b99-f52212dfb5fe\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.758992 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-combined-ca-bundle\") pod \"5841d425-dec2-437b-9b99-f52212dfb5fe\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.759030 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-scripts\") pod \"5841d425-dec2-437b-9b99-f52212dfb5fe\" (UID: \"5841d425-dec2-437b-9b99-f52212dfb5fe\") " Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.766570 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5841d425-dec2-437b-9b99-f52212dfb5fe" (UID: "5841d425-dec2-437b-9b99-f52212dfb5fe"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.766847 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5841d425-dec2-437b-9b99-f52212dfb5fe" (UID: "5841d425-dec2-437b-9b99-f52212dfb5fe"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.787314 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-scripts" (OuterVolumeSpecName: "scripts") pod "5841d425-dec2-437b-9b99-f52212dfb5fe" (UID: "5841d425-dec2-437b-9b99-f52212dfb5fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.790602 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5841d425-dec2-437b-9b99-f52212dfb5fe-kube-api-access-7fv4t" (OuterVolumeSpecName: "kube-api-access-7fv4t") pod "5841d425-dec2-437b-9b99-f52212dfb5fe" (UID: "5841d425-dec2-437b-9b99-f52212dfb5fe"). InnerVolumeSpecName "kube-api-access-7fv4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.807935 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5841d425-dec2-437b-9b99-f52212dfb5fe" (UID: "5841d425-dec2-437b-9b99-f52212dfb5fe"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.861819 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.861844 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.861858 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fv4t\" (UniqueName: \"kubernetes.io/projected/5841d425-dec2-437b-9b99-f52212dfb5fe-kube-api-access-7fv4t\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.861868 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.861877 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5841d425-dec2-437b-9b99-f52212dfb5fe-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.888677 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5841d425-dec2-437b-9b99-f52212dfb5fe" (UID: "5841d425-dec2-437b-9b99-f52212dfb5fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.919093 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-config-data" (OuterVolumeSpecName: "config-data") pod "5841d425-dec2-437b-9b99-f52212dfb5fe" (UID: "5841d425-dec2-437b-9b99-f52212dfb5fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.964071 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:20 crc kubenswrapper[4634]: I0929 14:06:20.964437 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5841d425-dec2-437b-9b99-f52212dfb5fe-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.054394 4634 generic.go:334] "Generic (PLEG): container finished" podID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerID="b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d" exitCode=0 Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.054467 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerDied","Data":"b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d"} Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.054490 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.054516 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5841d425-dec2-437b-9b99-f52212dfb5fe","Type":"ContainerDied","Data":"f1ee56bd57708828a09a59cf42c9e7aa598189cd75cdfc196b66f1364de5e4b7"} Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.054561 4634 scope.go:117] "RemoveContainer" containerID="cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.103200 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.111544 4634 scope.go:117] "RemoveContainer" containerID="9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.129687 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.141453 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.142047 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="sg-core" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.142067 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="sg-core" Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.142099 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="proxy-httpd" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.142147 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="proxy-httpd" Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.142182 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="ceilometer-central-agent" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.142189 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="ceilometer-central-agent" Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.142243 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="ceilometer-notification-agent" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.142251 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="ceilometer-notification-agent" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.144260 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="ceilometer-central-agent" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.144294 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="sg-core" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.144312 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" containerName="proxy-httpd" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.144324 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" 
containerName="ceilometer-notification-agent" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.145213 4634 scope.go:117] "RemoveContainer" containerID="e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.159126 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.159255 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.175366 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.175770 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.179318 4634 scope.go:117] "RemoveContainer" containerID="b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.234314 4634 scope.go:117] "RemoveContainer" containerID="cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96" Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.235021 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96\": container with ID starting with cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96 not found: ID does not exist" containerID="cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.235264 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96"} err="failed to get container status \"cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96\": rpc error: code = NotFound desc = could not find container \"cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96\": container with ID starting with cc35bf953223c896619ec6fa5c1b77e8cb074e57477b33ee39df257da2821f96 not found: ID does not exist" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.235336 4634 scope.go:117] "RemoveContainer" containerID="9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9" Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.235883 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9\": container with ID starting with 9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9 not found: ID does not exist" containerID="9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.235945 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9"} err="failed to get container status \"9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9\": rpc error: code = NotFound desc = could not find container \"9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9\": container with ID starting with 9804c1a9e178f5245c1b49933a4c54e2fc134789582b5d65d0bdb8e5649a12c9 not found: ID does not exist" Sep 29 14:06:21 crc 
kubenswrapper[4634]: I0929 14:06:21.235984 4634 scope.go:117] "RemoveContainer" containerID="e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764" Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.236656 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764\": container with ID starting with e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764 not found: ID does not exist" containerID="e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.236697 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764"} err="failed to get container status \"e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764\": rpc error: code = NotFound desc = could not find container \"e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764\": container with ID starting with e4b408fc932120e17c1ff9846cd8c1e2a566e8a2ac8c975652cd8b14f5a0e764 not found: ID does not exist" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.236724 4634 scope.go:117] "RemoveContainer" containerID="b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d" Sep 29 14:06:21 crc kubenswrapper[4634]: E0929 14:06:21.237059 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d\": container with ID starting with b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d not found: ID does not exist" containerID="b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.237117 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d"} err="failed to get container status \"b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d\": rpc error: code = NotFound desc = could not find container \"b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d\": container with ID starting with b654c676298155d9967aa2bdd568b680fe5f57221fa907f674a58457a98fb60d not found: ID does not exist" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.281604 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-log-httpd\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.281669 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-scripts\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.281694 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-run-httpd\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 
14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.281724 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-config-data\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.281809 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.281832 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nps7p\" (UniqueName: \"kubernetes.io/projected/a38be6ff-e8e8-4e54-a452-708a4485fa7d-kube-api-access-nps7p\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.281850 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.384393 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.384459 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nps7p\" (UniqueName: \"kubernetes.io/projected/a38be6ff-e8e8-4e54-a452-708a4485fa7d-kube-api-access-nps7p\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.384479 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.384560 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-log-httpd\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.384593 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-scripts\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.384614 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-run-httpd\") pod 
\"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.384644 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-config-data\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.386213 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-run-httpd\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.386435 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-log-httpd\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.390050 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.391420 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.403888 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-scripts\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.404166 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-config-data\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.421821 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nps7p\" (UniqueName: \"kubernetes.io/projected/a38be6ff-e8e8-4e54-a452-708a4485fa7d-kube-api-access-nps7p\") pod \"ceilometer-0\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.493216 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:21 crc kubenswrapper[4634]: I0929 14:06:21.776873 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:22 crc kubenswrapper[4634]: I0929 14:06:22.066350 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerStarted","Data":"08577ef09a7751eca20cd77d3487f2a5d5dd6ab8f8c653e58a3081377434f0e4"} Sep 29 14:06:22 crc kubenswrapper[4634]: I0929 14:06:22.128335 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5841d425-dec2-437b-9b99-f52212dfb5fe" path="/var/lib/kubelet/pods/5841d425-dec2-437b-9b99-f52212dfb5fe/volumes" Sep 29 14:06:23 crc kubenswrapper[4634]: I0929 14:06:23.081070 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerStarted","Data":"c95d1729d92f477f9a09bf579c41a5bc8ff27a8fbd5c0f1f3a5c39ba5885a168"} Sep 29 14:06:23 crc kubenswrapper[4634]: I0929 14:06:23.407586 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:24 crc kubenswrapper[4634]: I0929 14:06:24.100810 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerStarted","Data":"864260238c84dc1843acc50f721c543f6d924654f2acb18e6a21f3ce14bbd645"} Sep 29 14:06:25 crc kubenswrapper[4634]: I0929 14:06:25.118223 4634 generic.go:334] "Generic (PLEG): container finished" podID="dec8206e-efde-472c-9342-1ac2e0913508" containerID="30f71aa55e79d407cb936964a158516f3fa28f5592ee3f678476206d3325268e" exitCode=0 Sep 29 14:06:25 crc kubenswrapper[4634]: I0929 14:06:25.118299 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjgff" event={"ID":"dec8206e-efde-472c-9342-1ac2e0913508","Type":"ContainerDied","Data":"30f71aa55e79d407cb936964a158516f3fa28f5592ee3f678476206d3325268e"} Sep 29 14:06:25 crc kubenswrapper[4634]: I0929 14:06:25.123473 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerStarted","Data":"e64148d209e022470cddc46facd5b5867b9b4c5d13999bb4fbe8108f8332e0b6"} Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.577940 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.642346 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-scripts\") pod \"dec8206e-efde-472c-9342-1ac2e0913508\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.642424 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-config-data\") pod \"dec8206e-efde-472c-9342-1ac2e0913508\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.642538 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-combined-ca-bundle\") pod \"dec8206e-efde-472c-9342-1ac2e0913508\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.642612 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wkd5\" (UniqueName: \"kubernetes.io/projected/dec8206e-efde-472c-9342-1ac2e0913508-kube-api-access-9wkd5\") pod \"dec8206e-efde-472c-9342-1ac2e0913508\" (UID: \"dec8206e-efde-472c-9342-1ac2e0913508\") " Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.650504 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dec8206e-efde-472c-9342-1ac2e0913508-kube-api-access-9wkd5" (OuterVolumeSpecName: "kube-api-access-9wkd5") pod "dec8206e-efde-472c-9342-1ac2e0913508" (UID: "dec8206e-efde-472c-9342-1ac2e0913508"). InnerVolumeSpecName "kube-api-access-9wkd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.650638 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-scripts" (OuterVolumeSpecName: "scripts") pod "dec8206e-efde-472c-9342-1ac2e0913508" (UID: "dec8206e-efde-472c-9342-1ac2e0913508"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.674064 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-config-data" (OuterVolumeSpecName: "config-data") pod "dec8206e-efde-472c-9342-1ac2e0913508" (UID: "dec8206e-efde-472c-9342-1ac2e0913508"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.680786 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dec8206e-efde-472c-9342-1ac2e0913508" (UID: "dec8206e-efde-472c-9342-1ac2e0913508"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.745375 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.745421 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wkd5\" (UniqueName: \"kubernetes.io/projected/dec8206e-efde-472c-9342-1ac2e0913508-kube-api-access-9wkd5\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.745435 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:26 crc kubenswrapper[4634]: I0929 14:06:26.745446 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dec8206e-efde-472c-9342-1ac2e0913508-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.157694 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-gjgff" event={"ID":"dec8206e-efde-472c-9342-1ac2e0913508","Type":"ContainerDied","Data":"0cf7015a65fc3f931b399daddb0f5ec2611e8fe9508740701f24b5e13f0c5574"} Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.157754 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0cf7015a65fc3f931b399daddb0f5ec2611e8fe9508740701f24b5e13f0c5574" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.157762 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-gjgff" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.161763 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerStarted","Data":"7609dd4461d93ad5fa481d41be90afa830602a9d92248e33f9c2d487c697d493"} Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.161988 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-central-agent" containerID="cri-o://c95d1729d92f477f9a09bf579c41a5bc8ff27a8fbd5c0f1f3a5c39ba5885a168" gracePeriod=30 Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.162040 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.162144 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="proxy-httpd" containerID="cri-o://7609dd4461d93ad5fa481d41be90afa830602a9d92248e33f9c2d487c697d493" gracePeriod=30 Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.162194 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="sg-core" containerID="cri-o://e64148d209e022470cddc46facd5b5867b9b4c5d13999bb4fbe8108f8332e0b6" gracePeriod=30 Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.162250 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-notification-agent" containerID="cri-o://864260238c84dc1843acc50f721c543f6d924654f2acb18e6a21f3ce14bbd645" gracePeriod=30 Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.227557 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.03482176 podStartE2EDuration="6.227526782s" podCreationTimestamp="2025-09-29 14:06:21 +0000 UTC" firstStartedPulling="2025-09-29 14:06:21.786645184 +0000 UTC m=+1312.355372933" lastFinishedPulling="2025-09-29 14:06:25.979350206 +0000 UTC m=+1316.548077955" observedRunningTime="2025-09-29 14:06:27.197190347 +0000 UTC m=+1317.765918096" watchObservedRunningTime="2025-09-29 14:06:27.227526782 +0000 UTC m=+1317.796254531" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.298642 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 14:06:27 crc kubenswrapper[4634]: E0929 14:06:27.302299 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dec8206e-efde-472c-9342-1ac2e0913508" containerName="nova-cell0-conductor-db-sync" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.302335 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="dec8206e-efde-472c-9342-1ac2e0913508" containerName="nova-cell0-conductor-db-sync" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.302652 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="dec8206e-efde-472c-9342-1ac2e0913508" containerName="nova-cell0-conductor-db-sync" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.303911 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.307801 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-w4clr" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.311954 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.326354 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.360522 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad87c2fd-79c4-4931-99d6-bec867ee637e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.360937 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbg6c\" (UniqueName: \"kubernetes.io/projected/ad87c2fd-79c4-4931-99d6-bec867ee637e-kube-api-access-bbg6c\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.361006 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad87c2fd-79c4-4931-99d6-bec867ee637e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.462902 4634 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad87c2fd-79c4-4931-99d6-bec867ee637e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.463044 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad87c2fd-79c4-4931-99d6-bec867ee637e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.463150 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbg6c\" (UniqueName: \"kubernetes.io/projected/ad87c2fd-79c4-4931-99d6-bec867ee637e-kube-api-access-bbg6c\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.469199 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad87c2fd-79c4-4931-99d6-bec867ee637e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.469248 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad87c2fd-79c4-4931-99d6-bec867ee637e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.487077 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbg6c\" (UniqueName: \"kubernetes.io/projected/ad87c2fd-79c4-4931-99d6-bec867ee637e-kube-api-access-bbg6c\") pod \"nova-cell0-conductor-0\" (UID: \"ad87c2fd-79c4-4931-99d6-bec867ee637e\") " pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:27 crc kubenswrapper[4634]: I0929 14:06:27.737398 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:28 crc kubenswrapper[4634]: I0929 14:06:28.213645 4634 generic.go:334] "Generic (PLEG): container finished" podID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerID="7609dd4461d93ad5fa481d41be90afa830602a9d92248e33f9c2d487c697d493" exitCode=0 Sep 29 14:06:28 crc kubenswrapper[4634]: I0929 14:06:28.214045 4634 generic.go:334] "Generic (PLEG): container finished" podID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerID="e64148d209e022470cddc46facd5b5867b9b4c5d13999bb4fbe8108f8332e0b6" exitCode=2 Sep 29 14:06:28 crc kubenswrapper[4634]: I0929 14:06:28.214054 4634 generic.go:334] "Generic (PLEG): container finished" podID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerID="864260238c84dc1843acc50f721c543f6d924654f2acb18e6a21f3ce14bbd645" exitCode=0 Sep 29 14:06:28 crc kubenswrapper[4634]: I0929 14:06:28.213738 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerDied","Data":"7609dd4461d93ad5fa481d41be90afa830602a9d92248e33f9c2d487c697d493"} Sep 29 14:06:28 crc kubenswrapper[4634]: I0929 14:06:28.214114 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerDied","Data":"e64148d209e022470cddc46facd5b5867b9b4c5d13999bb4fbe8108f8332e0b6"} Sep 29 14:06:28 crc kubenswrapper[4634]: I0929 14:06:28.214132 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerDied","Data":"864260238c84dc1843acc50f721c543f6d924654f2acb18e6a21f3ce14bbd645"} Sep 29 14:06:28 crc kubenswrapper[4634]: I0929 14:06:28.535238 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.228591 4634 generic.go:334] "Generic (PLEG): container finished" podID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerID="c95d1729d92f477f9a09bf579c41a5bc8ff27a8fbd5c0f1f3a5c39ba5885a168" exitCode=0 Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.228678 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerDied","Data":"c95d1729d92f477f9a09bf579c41a5bc8ff27a8fbd5c0f1f3a5c39ba5885a168"} Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.231525 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ad87c2fd-79c4-4931-99d6-bec867ee637e","Type":"ContainerStarted","Data":"8e135456d72a26f9e5f65249ad017534c1547aaccb90d3ca98c2fbfb88390519"} Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.231588 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ad87c2fd-79c4-4931-99d6-bec867ee637e","Type":"ContainerStarted","Data":"e5744e83a387ee6d0a3cd0d14ca3a1f8aedb0a23aa9971fa96b90e91a867d9d9"} Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.231670 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.264275 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.264242095 podStartE2EDuration="2.264242095s" podCreationTimestamp="2025-09-29 14:06:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:06:29.254593352 +0000 UTC m=+1319.823321101" watchObservedRunningTime="2025-09-29 14:06:29.264242095 +0000 UTC m=+1319.832969844" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.589543 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.620642 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-scripts\") pod \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.620800 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-combined-ca-bundle\") pod \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.620860 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nps7p\" (UniqueName: \"kubernetes.io/projected/a38be6ff-e8e8-4e54-a452-708a4485fa7d-kube-api-access-nps7p\") pod \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.620896 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-sg-core-conf-yaml\") pod \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.620917 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-run-httpd\") pod \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.621029 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-log-httpd\") pod \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.621094 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-config-data\") pod \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\" (UID: \"a38be6ff-e8e8-4e54-a452-708a4485fa7d\") " Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.622527 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a38be6ff-e8e8-4e54-a452-708a4485fa7d" (UID: "a38be6ff-e8e8-4e54-a452-708a4485fa7d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.622740 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a38be6ff-e8e8-4e54-a452-708a4485fa7d" (UID: "a38be6ff-e8e8-4e54-a452-708a4485fa7d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.645934 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-scripts" (OuterVolumeSpecName: "scripts") pod "a38be6ff-e8e8-4e54-a452-708a4485fa7d" (UID: "a38be6ff-e8e8-4e54-a452-708a4485fa7d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.651593 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a38be6ff-e8e8-4e54-a452-708a4485fa7d-kube-api-access-nps7p" (OuterVolumeSpecName: "kube-api-access-nps7p") pod "a38be6ff-e8e8-4e54-a452-708a4485fa7d" (UID: "a38be6ff-e8e8-4e54-a452-708a4485fa7d"). InnerVolumeSpecName "kube-api-access-nps7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.688104 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a38be6ff-e8e8-4e54-a452-708a4485fa7d" (UID: "a38be6ff-e8e8-4e54-a452-708a4485fa7d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.723620 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.723652 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.723664 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nps7p\" (UniqueName: \"kubernetes.io/projected/a38be6ff-e8e8-4e54-a452-708a4485fa7d-kube-api-access-nps7p\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.723674 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.723684 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a38be6ff-e8e8-4e54-a452-708a4485fa7d-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.773841 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a38be6ff-e8e8-4e54-a452-708a4485fa7d" (UID: "a38be6ff-e8e8-4e54-a452-708a4485fa7d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.789075 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-config-data" (OuterVolumeSpecName: "config-data") pod "a38be6ff-e8e8-4e54-a452-708a4485fa7d" (UID: "a38be6ff-e8e8-4e54-a452-708a4485fa7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.825308 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:29 crc kubenswrapper[4634]: I0929 14:06:29.825344 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38be6ff-e8e8-4e54-a452-708a4485fa7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.246048 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a38be6ff-e8e8-4e54-a452-708a4485fa7d","Type":"ContainerDied","Data":"08577ef09a7751eca20cd77d3487f2a5d5dd6ab8f8c653e58a3081377434f0e4"} Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.246094 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.246571 4634 scope.go:117] "RemoveContainer" containerID="7609dd4461d93ad5fa481d41be90afa830602a9d92248e33f9c2d487c697d493" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.278100 4634 scope.go:117] "RemoveContainer" containerID="e64148d209e022470cddc46facd5b5867b9b4c5d13999bb4fbe8108f8332e0b6" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.282325 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.302169 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.314071 4634 scope.go:117] "RemoveContainer" containerID="864260238c84dc1843acc50f721c543f6d924654f2acb18e6a21f3ce14bbd645" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.327236 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:30 crc kubenswrapper[4634]: E0929 14:06:30.328904 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="proxy-httpd" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.328929 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="proxy-httpd" Sep 29 14:06:30 crc kubenswrapper[4634]: E0929 14:06:30.328944 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-central-agent" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.328951 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-central-agent" Sep 29 14:06:30 crc kubenswrapper[4634]: E0929 14:06:30.328969 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-notification-agent" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.328978 4634 
state_mem.go:107] "Deleted CPUSet assignment" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-notification-agent" Sep 29 14:06:30 crc kubenswrapper[4634]: E0929 14:06:30.328994 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="sg-core" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.329000 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="sg-core" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.329228 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="proxy-httpd" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.329239 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="sg-core" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.329257 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-notification-agent" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.329270 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" containerName="ceilometer-central-agent" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.331521 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.339729 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.339990 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.342242 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.367140 4634 scope.go:117] "RemoveContainer" containerID="c95d1729d92f477f9a09bf579c41a5bc8ff27a8fbd5c0f1f3a5c39ba5885a168" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.444343 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-run-httpd\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.444398 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-config-data\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.444421 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.444694 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6262\" (UniqueName: 
\"kubernetes.io/projected/956b6f4a-f34e-4ab8-a841-d997925e73a4-kube-api-access-b6262\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.444763 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-log-httpd\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.444919 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.444979 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-scripts\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548205 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-scripts\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548348 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-run-httpd\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548403 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-config-data\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548488 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548540 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6262\" (UniqueName: \"kubernetes.io/projected/956b6f4a-f34e-4ab8-a841-d997925e73a4-kube-api-access-b6262\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548569 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-log-httpd\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548660 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.548912 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-run-httpd\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.549410 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-log-httpd\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.552202 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-scripts\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.552820 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.562914 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-config-data\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.563851 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.566660 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6262\" (UniqueName: \"kubernetes.io/projected/956b6f4a-f34e-4ab8-a841-d997925e73a4-kube-api-access-b6262\") pod \"ceilometer-0\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " pod="openstack/ceilometer-0" Sep 29 14:06:30 crc kubenswrapper[4634]: I0929 14:06:30.666365 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:06:31 crc kubenswrapper[4634]: I0929 14:06:31.173980 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:06:31 crc kubenswrapper[4634]: W0929 14:06:31.184860 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod956b6f4a_f34e_4ab8_a841_d997925e73a4.slice/crio-8aaef87db4d0f97947fd4d5a23d2e94e14d0bec93c8ce9296ac79a33f3073361 WatchSource:0}: Error finding container 8aaef87db4d0f97947fd4d5a23d2e94e14d0bec93c8ce9296ac79a33f3073361: Status 404 returned error can't find the container with id 8aaef87db4d0f97947fd4d5a23d2e94e14d0bec93c8ce9296ac79a33f3073361 Sep 29 14:06:31 crc kubenswrapper[4634]: I0929 14:06:31.257576 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerStarted","Data":"8aaef87db4d0f97947fd4d5a23d2e94e14d0bec93c8ce9296ac79a33f3073361"} Sep 29 14:06:32 crc kubenswrapper[4634]: I0929 14:06:32.124278 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a38be6ff-e8e8-4e54-a452-708a4485fa7d" path="/var/lib/kubelet/pods/a38be6ff-e8e8-4e54-a452-708a4485fa7d/volumes" Sep 29 14:06:32 crc kubenswrapper[4634]: I0929 14:06:32.803336 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="metallb-system/metallb-operator-webhook-server-5ff45f5c66-t6xh9" podUID="03429314-d17f-4ffa-9d58-b89748690fec" containerName="webhook-server" probeResult="failure" output="Get \"http://10.217.0.52:7472/metrics\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:06:36 crc kubenswrapper[4634]: I0929 14:06:36.317126 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerStarted","Data":"8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2"} Sep 29 14:06:37 crc kubenswrapper[4634]: I0929 14:06:37.780148 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.530605 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-x8ss8"] Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.532330 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.535568 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.536331 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.549984 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-x8ss8"] Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.731125 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-scripts\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.731633 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-config-data\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.731675 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.731746 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqbkz\" (UniqueName: \"kubernetes.io/projected/25131260-fc44-4b6a-beb9-98cc9ce0f27d-kube-api-access-sqbkz\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.786101 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.801289 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.809167 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.814464 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.838567 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.838817 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-logs\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.838925 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqbkz\" (UniqueName: \"kubernetes.io/projected/25131260-fc44-4b6a-beb9-98cc9ce0f27d-kube-api-access-sqbkz\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.839029 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pml27\" (UniqueName: \"kubernetes.io/projected/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-kube-api-access-pml27\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.839114 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-scripts\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.839217 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.839311 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-config-data\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.839400 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-config-data\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.862260 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-config-data\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.879419 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-scripts\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.879708 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.939063 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqbkz\" (UniqueName: \"kubernetes.io/projected/25131260-fc44-4b6a-beb9-98cc9ce0f27d-kube-api-access-sqbkz\") pod \"nova-cell0-cell-mapping-x8ss8\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.942184 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pml27\" (UniqueName: \"kubernetes.io/projected/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-kube-api-access-pml27\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.942260 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.942293 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-config-data\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.942372 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-logs\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.942836 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-logs\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.948374 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.950734 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-config-data\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.982150 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:38 crc kubenswrapper[4634]: I0929 14:06:38.985789 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pml27\" (UniqueName: \"kubernetes.io/projected/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-kube-api-access-pml27\") pod \"nova-api-0\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") " pod="openstack/nova-api-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.119152 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.121875 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.151147 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.193364 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.314623 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-logs\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.317447 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zprhj\" (UniqueName: \"kubernetes.io/projected/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-kube-api-access-zprhj\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.317720 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.317885 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-config-data\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.370154 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.381332 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.424205 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.427762 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zprhj\" (UniqueName: \"kubernetes.io/projected/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-kube-api-access-zprhj\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.427924 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.427997 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.428056 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-config-data\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.435677 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.449860 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-config-data\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.449991 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qmpz\" (UniqueName: \"kubernetes.io/projected/004847bf-1f65-49c4-b1a4-941f427ceab4-kube-api-access-6qmpz\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.450170 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-logs\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.450833 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-logs\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc 
kubenswrapper[4634]: I0929 14:06:39.459554 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.476306 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-config-data\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.539050 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.554998 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.555060 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-config-data\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.555118 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qmpz\" (UniqueName: \"kubernetes.io/projected/004847bf-1f65-49c4-b1a4-941f427ceab4-kube-api-access-6qmpz\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.556040 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zprhj\" (UniqueName: \"kubernetes.io/projected/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-kube-api-access-zprhj\") pod \"nova-metadata-0\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.576563 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-config-data\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.581072 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.596772 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.598655 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.664474 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.667795 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jlcpr\" (UniqueName: \"kubernetes.io/projected/a29454b6-89c5-496f-af78-230a3ace579f-kube-api-access-jlcpr\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.667947 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.668033 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.678853 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qmpz\" (UniqueName: \"kubernetes.io/projected/004847bf-1f65-49c4-b1a4-941f427ceab4-kube-api-access-6qmpz\") pod \"nova-scheduler-0\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") " pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.762808 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.772730 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.772792 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jlcpr\" (UniqueName: \"kubernetes.io/projected/a29454b6-89c5-496f-af78-230a3ace579f-kube-api-access-jlcpr\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.772891 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.781980 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.801721 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-bcgbg"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.802722 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.803010 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.808153 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.840901 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.843743 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jlcpr\" (UniqueName: \"kubernetes.io/projected/a29454b6-89c5-496f-af78-230a3ace579f-kube-api-access-jlcpr\") pod \"nova-cell1-novncproxy-0\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.877099 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftf2m\" (UniqueName: \"kubernetes.io/projected/b390039e-04d4-492f-b211-091f63ab658b-kube-api-access-ftf2m\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.877153 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.877227 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.877266 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.877360 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-config\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.877380 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.878727 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-bcgbg"] Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.984142 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-config\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.984212 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.984263 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftf2m\" (UniqueName: \"kubernetes.io/projected/b390039e-04d4-492f-b211-091f63ab658b-kube-api-access-ftf2m\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.984294 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.984367 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:39 crc kubenswrapper[4634]: I0929 14:06:39.984413 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.030676 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftf2m\" (UniqueName: \"kubernetes.io/projected/b390039e-04d4-492f-b211-091f63ab658b-kube-api-access-ftf2m\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 
14:06:40.068101 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.191247 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-x8ss8"] Sep 29 14:06:40 crc kubenswrapper[4634]: W0929 14:06:40.241275 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25131260_fc44_4b6a_beb9_98cc9ce0f27d.slice/crio-5218fa3305f52058f7ef47f3fb2a472ad36f2087698db258327d743d6f47f414 WatchSource:0}: Error finding container 5218fa3305f52058f7ef47f3fb2a472ad36f2087698db258327d743d6f47f414: Status 404 returned error can't find the container with id 5218fa3305f52058f7ef47f3fb2a472ad36f2087698db258327d743d6f47f414 Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.289227 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-config\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.290396 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.290462 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.290900 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.291385 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-bcgbg\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.471762 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:40 crc kubenswrapper[4634]: I0929 14:06:40.503278 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:06:41 crc kubenswrapper[4634]: W0929 14:06:40.576596 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf23fba9b_a0c4_42c1_80aa_a1039e40bdf9.slice/crio-89cf2eb1dfeb1b632da71f60d9355720946c7a30879067a5b2f3679c6af16f45 WatchSource:0}: Error finding container 89cf2eb1dfeb1b632da71f60d9355720946c7a30879067a5b2f3679c6af16f45: Status 404 returned error can't find the container with id 89cf2eb1dfeb1b632da71f60d9355720946c7a30879067a5b2f3679c6af16f45 Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:40.582433 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerStarted","Data":"fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a"} Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:40.600299 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x8ss8" event={"ID":"25131260-fc44-4b6a-beb9-98cc9ce0f27d","Type":"ContainerStarted","Data":"5218fa3305f52058f7ef47f3fb2a472ad36f2087698db258327d743d6f47f414"} Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.074126 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-drmmw"] Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.075976 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.079016 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.079106 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.105594 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-drmmw"] Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.135150 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-scripts\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.135247 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-config-data\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.135278 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.135324 
4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6xnx\" (UniqueName: \"kubernetes.io/projected/0a923015-986d-4efd-9f6d-dcae7f51d7a2-kube-api-access-j6xnx\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.157916 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.184524 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.237180 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-scripts\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.237252 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-config-data\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.237304 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.237342 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6xnx\" (UniqueName: \"kubernetes.io/projected/0a923015-986d-4efd-9f6d-dcae7f51d7a2-kube-api-access-j6xnx\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.258151 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-config-data\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.262723 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-scripts\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.263223 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.272258 4634 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-j6xnx\" (UniqueName: \"kubernetes.io/projected/0a923015-986d-4efd-9f6d-dcae7f51d7a2-kube-api-access-j6xnx\") pod \"nova-cell1-conductor-db-sync-drmmw\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.396482 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.524529 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.594652 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-bcgbg"] Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.648346 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"004847bf-1f65-49c4-b1a4-941f427ceab4","Type":"ContainerStarted","Data":"50776e9922c57fab6ccd9517e1379fb1886a9b507e8fd2b42540d4d77bb54206"} Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.651828 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9","Type":"ContainerStarted","Data":"89cf2eb1dfeb1b632da71f60d9355720946c7a30879067a5b2f3679c6af16f45"} Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.663472 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x8ss8" event={"ID":"25131260-fc44-4b6a-beb9-98cc9ce0f27d","Type":"ContainerStarted","Data":"ba446259fee40f5bfbfe0a21049dd3f8566c7d82243fd2bd3137d51fcaf4b104"} Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.667887 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97b5adfc-d6c6-4295-94f8-dc8118d32e8e","Type":"ContainerStarted","Data":"390aed9e16be28878ce7aa2a8b3debe927162f2f3e9e14d0d83e4e42b67a1cad"} Sep 29 14:06:41 crc kubenswrapper[4634]: I0929 14:06:41.669928 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a29454b6-89c5-496f-af78-230a3ace579f","Type":"ContainerStarted","Data":"cdce7f969454a54baafa55c2f53ae41cbbc1022f0ed252bcf84269b685426ff0"} Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.083839 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-drmmw"] Sep 29 14:06:42 crc kubenswrapper[4634]: W0929 14:06:42.088307 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a923015_986d_4efd_9f6d_dcae7f51d7a2.slice/crio-08d7745df54dc7b1c8440e00fddaa77ca1649c5647a596e4d8f203e1bee2214a WatchSource:0}: Error finding container 08d7745df54dc7b1c8440e00fddaa77ca1649c5647a596e4d8f203e1bee2214a: Status 404 returned error can't find the container with id 08d7745df54dc7b1c8440e00fddaa77ca1649c5647a596e4d8f203e1bee2214a Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.689936 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-drmmw" event={"ID":"0a923015-986d-4efd-9f6d-dcae7f51d7a2","Type":"ContainerStarted","Data":"d1c4161a80399153cfcef26a83f09da2660c377b67fa74973462031b86b568e1"} Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.690659 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-drmmw" 
event={"ID":"0a923015-986d-4efd-9f6d-dcae7f51d7a2","Type":"ContainerStarted","Data":"08d7745df54dc7b1c8440e00fddaa77ca1649c5647a596e4d8f203e1bee2214a"} Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.695102 4634 generic.go:334] "Generic (PLEG): container finished" podID="b390039e-04d4-492f-b211-091f63ab658b" containerID="ca674c40459b1f6c1f31b96187b68076a7e643622dc2a5a31b1d9157378dd40c" exitCode=0 Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.695416 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" event={"ID":"b390039e-04d4-492f-b211-091f63ab658b","Type":"ContainerDied","Data":"ca674c40459b1f6c1f31b96187b68076a7e643622dc2a5a31b1d9157378dd40c"} Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.695444 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" event={"ID":"b390039e-04d4-492f-b211-091f63ab658b","Type":"ContainerStarted","Data":"ebebcadbd8fecb9f3eb5089d0e56c19d04f74dbf4e5a025b227382b5abc7e128"} Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.719972 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-drmmw" podStartSLOduration=1.7199519140000001 podStartE2EDuration="1.719951914s" podCreationTimestamp="2025-09-29 14:06:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:06:42.71980575 +0000 UTC m=+1333.288533499" watchObservedRunningTime="2025-09-29 14:06:42.719951914 +0000 UTC m=+1333.288679663" Sep 29 14:06:42 crc kubenswrapper[4634]: I0929 14:06:42.816028 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-x8ss8" podStartSLOduration=4.816003881 podStartE2EDuration="4.816003881s" podCreationTimestamp="2025-09-29 14:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:06:42.803316239 +0000 UTC m=+1333.372043998" watchObservedRunningTime="2025-09-29 14:06:42.816003881 +0000 UTC m=+1333.384731640" Sep 29 14:06:43 crc kubenswrapper[4634]: I0929 14:06:43.716791 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerStarted","Data":"e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d"} Sep 29 14:06:43 crc kubenswrapper[4634]: I0929 14:06:43.723466 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" event={"ID":"b390039e-04d4-492f-b211-091f63ab658b","Type":"ContainerStarted","Data":"c5f6148230cfbaeb5f7a734e7f04c62a5daa46cddddaf9a16d2c517a6b04c2a3"} Sep 29 14:06:43 crc kubenswrapper[4634]: I0929 14:06:43.723543 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:43 crc kubenswrapper[4634]: I0929 14:06:43.746930 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" podStartSLOduration=4.746909331 podStartE2EDuration="4.746909331s" podCreationTimestamp="2025-09-29 14:06:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:06:43.745662748 +0000 UTC m=+1334.314390497" watchObservedRunningTime="2025-09-29 14:06:43.746909331 +0000 UTC m=+1334.315637080" Sep 
29 14:06:44 crc kubenswrapper[4634]: I0929 14:06:44.157666 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:44 crc kubenswrapper[4634]: I0929 14:06:44.179864 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:06:44 crc kubenswrapper[4634]: I0929 14:06:44.396353 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:06:44 crc kubenswrapper[4634]: I0929 14:06:44.396433 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.787603 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerStarted","Data":"47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c"} Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.788405 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.789270 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"004847bf-1f65-49c4-b1a4-941f427ceab4","Type":"ContainerStarted","Data":"ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c"} Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.791661 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9","Type":"ContainerStarted","Data":"bca0bf6bf996232f1a055e282b28a699731f6901d1cf16e22c802976c29efccc"} Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.792879 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9","Type":"ContainerStarted","Data":"cdcfc1552f70f88834fecf2ad41ed0e21899e4b0cb631a75f9e47eb595538d15"} Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.794818 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97b5adfc-d6c6-4295-94f8-dc8118d32e8e","Type":"ContainerStarted","Data":"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e"} Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.794964 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97b5adfc-d6c6-4295-94f8-dc8118d32e8e","Type":"ContainerStarted","Data":"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835"} Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.795225 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-log" containerID="cri-o://e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835" gracePeriod=30 Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.795374 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" 
podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-metadata" containerID="cri-o://f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e" gracePeriod=30 Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.807071 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a29454b6-89c5-496f-af78-230a3ace579f","Type":"ContainerStarted","Data":"f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a"} Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.807508 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="a29454b6-89c5-496f-af78-230a3ace579f" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a" gracePeriod=30 Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.840044 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.408606682 podStartE2EDuration="17.840015024s" podCreationTimestamp="2025-09-29 14:06:30 +0000 UTC" firstStartedPulling="2025-09-29 14:06:31.188150992 +0000 UTC m=+1321.756878741" lastFinishedPulling="2025-09-29 14:06:46.619559294 +0000 UTC m=+1337.188287083" observedRunningTime="2025-09-29 14:06:47.815150732 +0000 UTC m=+1338.383878481" watchObservedRunningTime="2025-09-29 14:06:47.840015024 +0000 UTC m=+1338.408742773" Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.885048 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.392596424 podStartE2EDuration="8.885013443s" podCreationTimestamp="2025-09-29 14:06:39 +0000 UTC" firstStartedPulling="2025-09-29 14:06:41.171053017 +0000 UTC m=+1331.739780766" lastFinishedPulling="2025-09-29 14:06:46.663470016 +0000 UTC m=+1337.232197785" observedRunningTime="2025-09-29 14:06:47.850694684 +0000 UTC m=+1338.419422453" watchObservedRunningTime="2025-09-29 14:06:47.885013443 +0000 UTC m=+1338.453741192" Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.891877 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.3870435580000002 podStartE2EDuration="8.891857272s" podCreationTimestamp="2025-09-29 14:06:39 +0000 UTC" firstStartedPulling="2025-09-29 14:06:41.160972023 +0000 UTC m=+1331.729699772" lastFinishedPulling="2025-09-29 14:06:46.665785717 +0000 UTC m=+1337.234513486" observedRunningTime="2025-09-29 14:06:47.886488341 +0000 UTC m=+1338.455216090" watchObservedRunningTime="2025-09-29 14:06:47.891857272 +0000 UTC m=+1338.460585021" Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.931659 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.806577444 podStartE2EDuration="8.931634604s" podCreationTimestamp="2025-09-29 14:06:39 +0000 UTC" firstStartedPulling="2025-09-29 14:06:41.54011132 +0000 UTC m=+1332.108839069" lastFinishedPulling="2025-09-29 14:06:46.66516848 +0000 UTC m=+1337.233896229" observedRunningTime="2025-09-29 14:06:47.920636907 +0000 UTC m=+1338.489364656" watchObservedRunningTime="2025-09-29 14:06:47.931634604 +0000 UTC m=+1338.500362353" Sep 29 14:06:47 crc kubenswrapper[4634]: I0929 14:06:47.961098 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.894077748 
podStartE2EDuration="9.961069026s" podCreationTimestamp="2025-09-29 14:06:38 +0000 UTC" firstStartedPulling="2025-09-29 14:06:40.606729236 +0000 UTC m=+1331.175456985" lastFinishedPulling="2025-09-29 14:06:46.673720514 +0000 UTC m=+1337.242448263" observedRunningTime="2025-09-29 14:06:47.95663624 +0000 UTC m=+1338.525363989" watchObservedRunningTime="2025-09-29 14:06:47.961069026 +0000 UTC m=+1338.529796775" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.570239 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.676253 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-combined-ca-bundle\") pod \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.676409 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-logs\") pod \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.676564 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-config-data\") pod \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.676585 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zprhj\" (UniqueName: \"kubernetes.io/projected/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-kube-api-access-zprhj\") pod \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\" (UID: \"97b5adfc-d6c6-4295-94f8-dc8118d32e8e\") " Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.676850 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-logs" (OuterVolumeSpecName: "logs") pod "97b5adfc-d6c6-4295-94f8-dc8118d32e8e" (UID: "97b5adfc-d6c6-4295-94f8-dc8118d32e8e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.677831 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.700512 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-kube-api-access-zprhj" (OuterVolumeSpecName: "kube-api-access-zprhj") pod "97b5adfc-d6c6-4295-94f8-dc8118d32e8e" (UID: "97b5adfc-d6c6-4295-94f8-dc8118d32e8e"). InnerVolumeSpecName "kube-api-access-zprhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.714855 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-config-data" (OuterVolumeSpecName: "config-data") pod "97b5adfc-d6c6-4295-94f8-dc8118d32e8e" (UID: "97b5adfc-d6c6-4295-94f8-dc8118d32e8e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.718218 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97b5adfc-d6c6-4295-94f8-dc8118d32e8e" (UID: "97b5adfc-d6c6-4295-94f8-dc8118d32e8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.781845 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.781884 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.781893 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zprhj\" (UniqueName: \"kubernetes.io/projected/97b5adfc-d6c6-4295-94f8-dc8118d32e8e-kube-api-access-zprhj\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.820386 4634 generic.go:334] "Generic (PLEG): container finished" podID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerID="f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e" exitCode=0 Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.820755 4634 generic.go:334] "Generic (PLEG): container finished" podID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerID="e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835" exitCode=143 Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.820663 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.820557 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97b5adfc-d6c6-4295-94f8-dc8118d32e8e","Type":"ContainerDied","Data":"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e"} Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.821309 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97b5adfc-d6c6-4295-94f8-dc8118d32e8e","Type":"ContainerDied","Data":"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835"} Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.821358 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"97b5adfc-d6c6-4295-94f8-dc8118d32e8e","Type":"ContainerDied","Data":"390aed9e16be28878ce7aa2a8b3debe927162f2f3e9e14d0d83e4e42b67a1cad"} Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.821387 4634 scope.go:117] "RemoveContainer" containerID="f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.857962 4634 scope.go:117] "RemoveContainer" containerID="e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.878577 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.889046 4634 scope.go:117] "RemoveContainer" containerID="f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e" Sep 29 14:06:48 crc kubenswrapper[4634]: E0929 14:06:48.893233 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e\": container with ID starting with f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e not found: ID does not exist" containerID="f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.893297 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e"} err="failed to get container status \"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e\": rpc error: code = NotFound desc = could not find container \"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e\": container with ID starting with f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e not found: ID does not exist" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.893343 4634 scope.go:117] "RemoveContainer" containerID="e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835" Sep 29 14:06:48 crc kubenswrapper[4634]: E0929 14:06:48.893930 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835\": container with ID starting with e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835 not found: ID does not exist" containerID="e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.893988 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835"} err="failed to get container status \"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835\": rpc error: code = NotFound desc = could not find container \"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835\": container with ID starting with e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835 not found: ID does not exist" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.894039 4634 scope.go:117] "RemoveContainer" containerID="f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.894777 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.894902 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e"} err="failed to get container status \"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e\": rpc error: code = NotFound desc = could not find container \"f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e\": container with ID starting with f31cb344edcbd20b6ae43157eefadad4c259cd4b9d028bb0c08f122a47e48f7e not found: ID does not exist" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.894929 4634 scope.go:117] "RemoveContainer" containerID="e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.895236 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835"} err="failed to get container status \"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835\": rpc error: code = NotFound desc = could not find container \"e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835\": container with ID starting with e67d7fd633eb686f9009fd23b7bfc879bcfcd7e1733af97026b9f9babcdb8835 not found: ID does not exist" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.904098 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:48 crc kubenswrapper[4634]: E0929 14:06:48.905378 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-metadata" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.905413 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-metadata" Sep 29 14:06:48 crc kubenswrapper[4634]: E0929 14:06:48.905453 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-log" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.905462 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-log" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.905676 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-log" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.905697 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" containerName="nova-metadata-metadata" Sep 29 14:06:48 crc 
kubenswrapper[4634]: I0929 14:06:48.906932 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.910159 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.910411 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.923650 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.986265 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.986357 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfsr6\" (UniqueName: \"kubernetes.io/projected/2edc859b-b252-4cd9-8d0e-299f5ef3a487-kube-api-access-kfsr6\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.986395 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-config-data\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.986493 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:48 crc kubenswrapper[4634]: I0929 14:06:48.986652 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2edc859b-b252-4cd9-8d0e-299f5ef3a487-logs\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.088776 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2edc859b-b252-4cd9-8d0e-299f5ef3a487-logs\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.088870 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.088906 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfsr6\" (UniqueName: \"kubernetes.io/projected/2edc859b-b252-4cd9-8d0e-299f5ef3a487-kube-api-access-kfsr6\") 
pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.088930 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-config-data\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.088990 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.089365 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2edc859b-b252-4cd9-8d0e-299f5ef3a487-logs\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.094222 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.094752 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.105983 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-config-data\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.117777 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfsr6\" (UniqueName: \"kubernetes.io/projected/2edc859b-b252-4cd9-8d0e-299f5ef3a487-kube-api-access-kfsr6\") pod \"nova-metadata-0\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") " pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.165376 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.165457 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.275977 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.842124 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.844149 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.889923 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 14:06:49 crc kubenswrapper[4634]: I0929 14:06:49.958821 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:06:50 crc kubenswrapper[4634]: W0929 14:06:50.013509 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2edc859b_b252_4cd9_8d0e_299f5ef3a487.slice/crio-e85b4d9b722cbe4022d6b1779c487941671e537305252f5ddadc19d75d2d5bff WatchSource:0}: Error finding container e85b4d9b722cbe4022d6b1779c487941671e537305252f5ddadc19d75d2d5bff: Status 404 returned error can't find the container with id e85b4d9b722cbe4022d6b1779c487941671e537305252f5ddadc19d75d2d5bff Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.070415 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.139246 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97b5adfc-d6c6-4295-94f8-dc8118d32e8e" path="/var/lib/kubelet/pods/97b5adfc-d6c6-4295-94f8-dc8118d32e8e/volumes" Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.248469 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.248496 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.473339 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.548876 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-smwzr"] Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.550060 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" podUID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerName="dnsmasq-dns" containerID="cri-o://485059c2feadad66c844ccb92b6d4ae98c977bb5fb6f94fd4900b817d764006b" gracePeriod=10 Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.865450 4634 generic.go:334] "Generic (PLEG): container finished" podID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerID="485059c2feadad66c844ccb92b6d4ae98c977bb5fb6f94fd4900b817d764006b" exitCode=0 Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.865517 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" 
event={"ID":"efd571a2-8f7f-4962-ae08-c415dbaab95e","Type":"ContainerDied","Data":"485059c2feadad66c844ccb92b6d4ae98c977bb5fb6f94fd4900b817d764006b"} Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.872864 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2edc859b-b252-4cd9-8d0e-299f5ef3a487","Type":"ContainerStarted","Data":"44a200f7f4db0b3a7d61ff8ef6f9816d682694936a41b353325326360b187310"} Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.872918 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2edc859b-b252-4cd9-8d0e-299f5ef3a487","Type":"ContainerStarted","Data":"65df7ea38fb7da168d0a36acf88c2ab06cd7c095850caaff7377ea59752e4d9a"} Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.872933 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2edc859b-b252-4cd9-8d0e-299f5ef3a487","Type":"ContainerStarted","Data":"e85b4d9b722cbe4022d6b1779c487941671e537305252f5ddadc19d75d2d5bff"} Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.902272 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.902239146 podStartE2EDuration="2.902239146s" podCreationTimestamp="2025-09-29 14:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:06:50.899507084 +0000 UTC m=+1341.468234833" watchObservedRunningTime="2025-09-29 14:06:50.902239146 +0000 UTC m=+1341.470966895" Sep 29 14:06:50 crc kubenswrapper[4634]: I0929 14:06:50.931554 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.350161 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.471802 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-config\") pod \"efd571a2-8f7f-4962-ae08-c415dbaab95e\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.471947 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-965sj\" (UniqueName: \"kubernetes.io/projected/efd571a2-8f7f-4962-ae08-c415dbaab95e-kube-api-access-965sj\") pod \"efd571a2-8f7f-4962-ae08-c415dbaab95e\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.472051 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-svc\") pod \"efd571a2-8f7f-4962-ae08-c415dbaab95e\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.472220 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-sb\") pod \"efd571a2-8f7f-4962-ae08-c415dbaab95e\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.472260 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-nb\") pod \"efd571a2-8f7f-4962-ae08-c415dbaab95e\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.472360 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-swift-storage-0\") pod \"efd571a2-8f7f-4962-ae08-c415dbaab95e\" (UID: \"efd571a2-8f7f-4962-ae08-c415dbaab95e\") " Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.493325 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efd571a2-8f7f-4962-ae08-c415dbaab95e-kube-api-access-965sj" (OuterVolumeSpecName: "kube-api-access-965sj") pod "efd571a2-8f7f-4962-ae08-c415dbaab95e" (UID: "efd571a2-8f7f-4962-ae08-c415dbaab95e"). InnerVolumeSpecName "kube-api-access-965sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.572178 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "efd571a2-8f7f-4962-ae08-c415dbaab95e" (UID: "efd571a2-8f7f-4962-ae08-c415dbaab95e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.576640 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.576667 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-965sj\" (UniqueName: \"kubernetes.io/projected/efd571a2-8f7f-4962-ae08-c415dbaab95e-kube-api-access-965sj\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.610844 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "efd571a2-8f7f-4962-ae08-c415dbaab95e" (UID: "efd571a2-8f7f-4962-ae08-c415dbaab95e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.611065 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "efd571a2-8f7f-4962-ae08-c415dbaab95e" (UID: "efd571a2-8f7f-4962-ae08-c415dbaab95e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.637403 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "efd571a2-8f7f-4962-ae08-c415dbaab95e" (UID: "efd571a2-8f7f-4962-ae08-c415dbaab95e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.661536 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-config" (OuterVolumeSpecName: "config") pod "efd571a2-8f7f-4962-ae08-c415dbaab95e" (UID: "efd571a2-8f7f-4962-ae08-c415dbaab95e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.679012 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.679053 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.679066 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.679075 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/efd571a2-8f7f-4962-ae08-c415dbaab95e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.904724 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.906250 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-smwzr" event={"ID":"efd571a2-8f7f-4962-ae08-c415dbaab95e","Type":"ContainerDied","Data":"fe564f054c40a0bd4313c18f1838786fab4589a30b91e4baeceb2a84eba9049d"} Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.906314 4634 scope.go:117] "RemoveContainer" containerID="485059c2feadad66c844ccb92b6d4ae98c977bb5fb6f94fd4900b817d764006b" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.964990 4634 scope.go:117] "RemoveContainer" containerID="fd07837d26e813a34d0297e23190b955a3e4b07ed08d8e9434a2ac26ca3ee485" Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.966494 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-smwzr"] Sep 29 14:06:51 crc kubenswrapper[4634]: I0929 14:06:51.978058 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-smwzr"] Sep 29 14:06:52 crc kubenswrapper[4634]: I0929 14:06:52.152423 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efd571a2-8f7f-4962-ae08-c415dbaab95e" path="/var/lib/kubelet/pods/efd571a2-8f7f-4962-ae08-c415dbaab95e/volumes" Sep 29 14:06:54 crc kubenswrapper[4634]: I0929 14:06:54.277540 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 14:06:54 crc kubenswrapper[4634]: I0929 14:06:54.278892 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 14:06:54 crc kubenswrapper[4634]: I0929 14:06:54.939434 4634 generic.go:334] "Generic (PLEG): container finished" podID="0a923015-986d-4efd-9f6d-dcae7f51d7a2" containerID="d1c4161a80399153cfcef26a83f09da2660c377b67fa74973462031b86b568e1" exitCode=0 Sep 29 14:06:54 crc kubenswrapper[4634]: I0929 14:06:54.939482 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-drmmw" event={"ID":"0a923015-986d-4efd-9f6d-dcae7f51d7a2","Type":"ContainerDied","Data":"d1c4161a80399153cfcef26a83f09da2660c377b67fa74973462031b86b568e1"} Sep 29 14:06:55 crc kubenswrapper[4634]: I0929 14:06:55.956359 4634 generic.go:334] "Generic (PLEG): container finished" podID="25131260-fc44-4b6a-beb9-98cc9ce0f27d" containerID="ba446259fee40f5bfbfe0a21049dd3f8566c7d82243fd2bd3137d51fcaf4b104" exitCode=0 Sep 29 14:06:55 crc kubenswrapper[4634]: I0929 14:06:55.956448 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x8ss8" event={"ID":"25131260-fc44-4b6a-beb9-98cc9ce0f27d","Type":"ContainerDied","Data":"ba446259fee40f5bfbfe0a21049dd3f8566c7d82243fd2bd3137d51fcaf4b104"} Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.407541 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.501006 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6xnx\" (UniqueName: \"kubernetes.io/projected/0a923015-986d-4efd-9f6d-dcae7f51d7a2-kube-api-access-j6xnx\") pod \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.501505 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-config-data\") pod \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.501617 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-scripts\") pod \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.501656 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-combined-ca-bundle\") pod \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\" (UID: \"0a923015-986d-4efd-9f6d-dcae7f51d7a2\") " Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.511198 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a923015-986d-4efd-9f6d-dcae7f51d7a2-kube-api-access-j6xnx" (OuterVolumeSpecName: "kube-api-access-j6xnx") pod "0a923015-986d-4efd-9f6d-dcae7f51d7a2" (UID: "0a923015-986d-4efd-9f6d-dcae7f51d7a2"). InnerVolumeSpecName "kube-api-access-j6xnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.514851 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-scripts" (OuterVolumeSpecName: "scripts") pod "0a923015-986d-4efd-9f6d-dcae7f51d7a2" (UID: "0a923015-986d-4efd-9f6d-dcae7f51d7a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.533122 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-config-data" (OuterVolumeSpecName: "config-data") pod "0a923015-986d-4efd-9f6d-dcae7f51d7a2" (UID: "0a923015-986d-4efd-9f6d-dcae7f51d7a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.551450 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a923015-986d-4efd-9f6d-dcae7f51d7a2" (UID: "0a923015-986d-4efd-9f6d-dcae7f51d7a2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.605499 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.605543 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.605562 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6xnx\" (UniqueName: \"kubernetes.io/projected/0a923015-986d-4efd-9f6d-dcae7f51d7a2-kube-api-access-j6xnx\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.605577 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a923015-986d-4efd-9f6d-dcae7f51d7a2-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.973264 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-drmmw" event={"ID":"0a923015-986d-4efd-9f6d-dcae7f51d7a2","Type":"ContainerDied","Data":"08d7745df54dc7b1c8440e00fddaa77ca1649c5647a596e4d8f203e1bee2214a"} Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.973311 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-drmmw" Sep 29 14:06:56 crc kubenswrapper[4634]: I0929 14:06:56.973321 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08d7745df54dc7b1c8440e00fddaa77ca1649c5647a596e4d8f203e1bee2214a" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.133545 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 14:06:57 crc kubenswrapper[4634]: E0929 14:06:57.134402 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerName="init" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.134428 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerName="init" Sep 29 14:06:57 crc kubenswrapper[4634]: E0929 14:06:57.134444 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a923015-986d-4efd-9f6d-dcae7f51d7a2" containerName="nova-cell1-conductor-db-sync" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.134451 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a923015-986d-4efd-9f6d-dcae7f51d7a2" containerName="nova-cell1-conductor-db-sync" Sep 29 14:06:57 crc kubenswrapper[4634]: E0929 14:06:57.134477 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerName="dnsmasq-dns" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.134483 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerName="dnsmasq-dns" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.134718 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a923015-986d-4efd-9f6d-dcae7f51d7a2" containerName="nova-cell1-conductor-db-sync" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.134735 4634 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="efd571a2-8f7f-4962-ae08-c415dbaab95e" containerName="dnsmasq-dns" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.136147 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.144811 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.152706 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.225059 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eded61-0572-44d1-8d17-78191173c99f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.225402 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eded61-0572-44d1-8d17-78191173c99f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.225475 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xql7j\" (UniqueName: \"kubernetes.io/projected/c0eded61-0572-44d1-8d17-78191173c99f-kube-api-access-xql7j\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.328108 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eded61-0572-44d1-8d17-78191173c99f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.328548 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eded61-0572-44d1-8d17-78191173c99f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.328576 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xql7j\" (UniqueName: \"kubernetes.io/projected/c0eded61-0572-44d1-8d17-78191173c99f-kube-api-access-xql7j\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.336526 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0eded61-0572-44d1-8d17-78191173c99f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.337135 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0eded61-0572-44d1-8d17-78191173c99f-config-data\") pod 
\"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.349033 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xql7j\" (UniqueName: \"kubernetes.io/projected/c0eded61-0572-44d1-8d17-78191173c99f-kube-api-access-xql7j\") pod \"nova-cell1-conductor-0\" (UID: \"c0eded61-0572-44d1-8d17-78191173c99f\") " pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.432770 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x8ss8" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.464048 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.532162 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-config-data\") pod \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.532653 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-combined-ca-bundle\") pod \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.532724 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqbkz\" (UniqueName: \"kubernetes.io/projected/25131260-fc44-4b6a-beb9-98cc9ce0f27d-kube-api-access-sqbkz\") pod \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.532946 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-scripts\") pod \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\" (UID: \"25131260-fc44-4b6a-beb9-98cc9ce0f27d\") " Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.538686 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-scripts" (OuterVolumeSpecName: "scripts") pod "25131260-fc44-4b6a-beb9-98cc9ce0f27d" (UID: "25131260-fc44-4b6a-beb9-98cc9ce0f27d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.538934 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25131260-fc44-4b6a-beb9-98cc9ce0f27d-kube-api-access-sqbkz" (OuterVolumeSpecName: "kube-api-access-sqbkz") pod "25131260-fc44-4b6a-beb9-98cc9ce0f27d" (UID: "25131260-fc44-4b6a-beb9-98cc9ce0f27d"). InnerVolumeSpecName "kube-api-access-sqbkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.579863 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25131260-fc44-4b6a-beb9-98cc9ce0f27d" (UID: "25131260-fc44-4b6a-beb9-98cc9ce0f27d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.619537 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-config-data" (OuterVolumeSpecName: "config-data") pod "25131260-fc44-4b6a-beb9-98cc9ce0f27d" (UID: "25131260-fc44-4b6a-beb9-98cc9ce0f27d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.640531 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.643945 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqbkz\" (UniqueName: \"kubernetes.io/projected/25131260-fc44-4b6a-beb9-98cc9ce0f27d-kube-api-access-sqbkz\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.643959 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.643970 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25131260-fc44-4b6a-beb9-98cc9ce0f27d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.994659 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-x8ss8" event={"ID":"25131260-fc44-4b6a-beb9-98cc9ce0f27d","Type":"ContainerDied","Data":"5218fa3305f52058f7ef47f3fb2a472ad36f2087698db258327d743d6f47f414"} Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.994790 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-x8ss8"
Sep 29 14:06:57 crc kubenswrapper[4634]: I0929 14:06:57.994780 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5218fa3305f52058f7ef47f3fb2a472ad36f2087698db258327d743d6f47f414"
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.079903 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Sep 29 14:06:58 crc kubenswrapper[4634]: W0929 14:06:58.086678 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0eded61_0572_44d1_8d17_78191173c99f.slice/crio-ffc6182c17ba1b24c4e9655ac75210d9ce8e6ca1d2495cd68feb2c9c21118c22 WatchSource:0}: Error finding container ffc6182c17ba1b24c4e9655ac75210d9ce8e6ca1d2495cd68feb2c9c21118c22: Status 404 returned error can't find the container with id ffc6182c17ba1b24c4e9655ac75210d9ce8e6ca1d2495cd68feb2c9c21118c22
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.198854 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.201429 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-log" containerID="cri-o://cdcfc1552f70f88834fecf2ad41ed0e21899e4b0cb631a75f9e47eb595538d15" gracePeriod=30
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.201739 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-api" containerID="cri-o://bca0bf6bf996232f1a055e282b28a699731f6901d1cf16e22c802976c29efccc" gracePeriod=30
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.253982 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.258588 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="004847bf-1f65-49c4-b1a4-941f427ceab4" containerName="nova-scheduler-scheduler" containerID="cri-o://ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c" gracePeriod=30
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.289657 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.289929 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-log" containerID="cri-o://65df7ea38fb7da168d0a36acf88c2ab06cd7c095850caaff7377ea59752e4d9a" gracePeriod=30
Sep 29 14:06:58 crc kubenswrapper[4634]: I0929 14:06:58.290143 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-metadata" containerID="cri-o://44a200f7f4db0b3a7d61ff8ef6f9816d682694936a41b353325326360b187310" gracePeriod=30
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.013973 4634 generic.go:334] "Generic (PLEG): container finished" podID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerID="44a200f7f4db0b3a7d61ff8ef6f9816d682694936a41b353325326360b187310" exitCode=0
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.015458 4634 generic.go:334] "Generic (PLEG): container finished" podID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerID="65df7ea38fb7da168d0a36acf88c2ab06cd7c095850caaff7377ea59752e4d9a" exitCode=143
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.014722 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2edc859b-b252-4cd9-8d0e-299f5ef3a487","Type":"ContainerDied","Data":"44a200f7f4db0b3a7d61ff8ef6f9816d682694936a41b353325326360b187310"}
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.015722 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2edc859b-b252-4cd9-8d0e-299f5ef3a487","Type":"ContainerDied","Data":"65df7ea38fb7da168d0a36acf88c2ab06cd7c095850caaff7377ea59752e4d9a"}
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.018983 4634 generic.go:334] "Generic (PLEG): container finished" podID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerID="cdcfc1552f70f88834fecf2ad41ed0e21899e4b0cb631a75f9e47eb595538d15" exitCode=143
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.019177 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9","Type":"ContainerDied","Data":"cdcfc1552f70f88834fecf2ad41ed0e21899e4b0cb631a75f9e47eb595538d15"}
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.020763 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c0eded61-0572-44d1-8d17-78191173c99f","Type":"ContainerStarted","Data":"d71dbde74b0dd5082f8a89d35bca7737f9bd1c017a1d0c081fbdff175f3a442c"}
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.022107 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.022188 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c0eded61-0572-44d1-8d17-78191173c99f","Type":"ContainerStarted","Data":"ffc6182c17ba1b24c4e9655ac75210d9ce8e6ca1d2495cd68feb2c9c21118c22"}
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.110076 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.144564 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.14454265 podStartE2EDuration="2.14454265s" podCreationTimestamp="2025-09-29 14:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:06:59.069644387 +0000 UTC m=+1349.638372146" watchObservedRunningTime="2025-09-29 14:06:59.14454265 +0000 UTC m=+1349.713270399"
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.205329 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-config-data\") pod \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") "
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.205388 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-combined-ca-bundle\") pod \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") "
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.205530 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-nova-metadata-tls-certs\") pod \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") "
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.205618 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfsr6\" (UniqueName: \"kubernetes.io/projected/2edc859b-b252-4cd9-8d0e-299f5ef3a487-kube-api-access-kfsr6\") pod \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") "
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.205666 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2edc859b-b252-4cd9-8d0e-299f5ef3a487-logs\") pod \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\" (UID: \"2edc859b-b252-4cd9-8d0e-299f5ef3a487\") "
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.210923 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2edc859b-b252-4cd9-8d0e-299f5ef3a487-logs" (OuterVolumeSpecName: "logs") pod "2edc859b-b252-4cd9-8d0e-299f5ef3a487" (UID: "2edc859b-b252-4cd9-8d0e-299f5ef3a487"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.273335 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2edc859b-b252-4cd9-8d0e-299f5ef3a487-kube-api-access-kfsr6" (OuterVolumeSpecName: "kube-api-access-kfsr6") pod "2edc859b-b252-4cd9-8d0e-299f5ef3a487" (UID: "2edc859b-b252-4cd9-8d0e-299f5ef3a487"). InnerVolumeSpecName "kube-api-access-kfsr6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.297975 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2edc859b-b252-4cd9-8d0e-299f5ef3a487" (UID: "2edc859b-b252-4cd9-8d0e-299f5ef3a487"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.303821 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-config-data" (OuterVolumeSpecName: "config-data") pod "2edc859b-b252-4cd9-8d0e-299f5ef3a487" (UID: "2edc859b-b252-4cd9-8d0e-299f5ef3a487"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.309769 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.309816 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.309829 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfsr6\" (UniqueName: \"kubernetes.io/projected/2edc859b-b252-4cd9-8d0e-299f5ef3a487-kube-api-access-kfsr6\") on node \"crc\" DevicePath \"\""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.309841 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2edc859b-b252-4cd9-8d0e-299f5ef3a487-logs\") on node \"crc\" DevicePath \"\""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.313725 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "2edc859b-b252-4cd9-8d0e-299f5ef3a487" (UID: "2edc859b-b252-4cd9-8d0e-299f5ef3a487"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:06:59 crc kubenswrapper[4634]: I0929 14:06:59.412068 4634 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2edc859b-b252-4cd9-8d0e-299f5ef3a487-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 14:06:59 crc kubenswrapper[4634]: E0929 14:06:59.844591 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 14:06:59 crc kubenswrapper[4634]: E0929 14:06:59.847019 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 14:06:59 crc kubenswrapper[4634]: E0929 14:06:59.848709 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Sep 29 14:06:59 crc kubenswrapper[4634]: E0929 14:06:59.848758 4634 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="004847bf-1f65-49c4-b1a4-941f427ceab4" containerName="nova-scheduler-scheduler"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.059445 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.060956 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2edc859b-b252-4cd9-8d0e-299f5ef3a487","Type":"ContainerDied","Data":"e85b4d9b722cbe4022d6b1779c487941671e537305252f5ddadc19d75d2d5bff"}
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.061157 4634 scope.go:117] "RemoveContainer" containerID="44a200f7f4db0b3a7d61ff8ef6f9816d682694936a41b353325326360b187310"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.109128 4634 scope.go:117] "RemoveContainer" containerID="65df7ea38fb7da168d0a36acf88c2ab06cd7c095850caaff7377ea59752e4d9a"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.140955 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.161286 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.187186 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 14:07:00 crc kubenswrapper[4634]: E0929 14:07:00.187796 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25131260-fc44-4b6a-beb9-98cc9ce0f27d" containerName="nova-manage"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.187819 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="25131260-fc44-4b6a-beb9-98cc9ce0f27d" containerName="nova-manage"
Sep 29 14:07:00 crc kubenswrapper[4634]: E0929 14:07:00.187847 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-log"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.187853 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-log"
Sep 29 14:07:00 crc kubenswrapper[4634]: E0929 14:07:00.187866 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-metadata"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.187873 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-metadata"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.188118 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="25131260-fc44-4b6a-beb9-98cc9ce0f27d" containerName="nova-manage"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.188140 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-log"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.188155 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" containerName="nova-metadata-metadata"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.189356 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.192909 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.193923 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.195975 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.239490 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.239549 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-config-data\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.239569 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm87b\" (UniqueName: \"kubernetes.io/projected/4027fd6b-2391-4fbc-b503-9e698682b404-kube-api-access-nm87b\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.239628 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.239699 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4027fd6b-2391-4fbc-b503-9e698682b404-logs\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.341781 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.341854 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-config-data\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.341877 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm87b\" (UniqueName: \"kubernetes.io/projected/4027fd6b-2391-4fbc-b503-9e698682b404-kube-api-access-nm87b\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.341936 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.342002 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4027fd6b-2391-4fbc-b503-9e698682b404-logs\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.342539 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4027fd6b-2391-4fbc-b503-9e698682b404-logs\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.349527 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.349926 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-config-data\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.353719 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.363449 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm87b\" (UniqueName: \"kubernetes.io/projected/4027fd6b-2391-4fbc-b503-9e698682b404-kube-api-access-nm87b\") pod \"nova-metadata-0\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.511763 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 29 14:07:00 crc kubenswrapper[4634]: I0929 14:07:00.703676 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.097626 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 29 14:07:01 crc kubenswrapper[4634]: W0929 14:07:01.102258 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4027fd6b_2391_4fbc_b503_9e698682b404.slice/crio-d5e86208bb9f34bfa624d4f19fc1e12bc5efbf3cd437d822997e3249ed299b8d WatchSource:0}: Error finding container d5e86208bb9f34bfa624d4f19fc1e12bc5efbf3cd437d822997e3249ed299b8d: Status 404 returned error can't find the container with id d5e86208bb9f34bfa624d4f19fc1e12bc5efbf3cd437d822997e3249ed299b8d
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.579291 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.672680 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qmpz\" (UniqueName: \"kubernetes.io/projected/004847bf-1f65-49c4-b1a4-941f427ceab4-kube-api-access-6qmpz\") pod \"004847bf-1f65-49c4-b1a4-941f427ceab4\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") "
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.672784 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-combined-ca-bundle\") pod \"004847bf-1f65-49c4-b1a4-941f427ceab4\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") "
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.673184 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-config-data\") pod \"004847bf-1f65-49c4-b1a4-941f427ceab4\" (UID: \"004847bf-1f65-49c4-b1a4-941f427ceab4\") "
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.676802 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/004847bf-1f65-49c4-b1a4-941f427ceab4-kube-api-access-6qmpz" (OuterVolumeSpecName: "kube-api-access-6qmpz") pod "004847bf-1f65-49c4-b1a4-941f427ceab4" (UID: "004847bf-1f65-49c4-b1a4-941f427ceab4"). InnerVolumeSpecName "kube-api-access-6qmpz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.707420 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "004847bf-1f65-49c4-b1a4-941f427ceab4" (UID: "004847bf-1f65-49c4-b1a4-941f427ceab4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.714627 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-config-data" (OuterVolumeSpecName: "config-data") pod "004847bf-1f65-49c4-b1a4-941f427ceab4" (UID: "004847bf-1f65-49c4-b1a4-941f427ceab4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.776702 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.776739 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qmpz\" (UniqueName: \"kubernetes.io/projected/004847bf-1f65-49c4-b1a4-941f427ceab4-kube-api-access-6qmpz\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:01 crc kubenswrapper[4634]: I0929 14:07:01.776750 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/004847bf-1f65-49c4-b1a4-941f427ceab4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.095730 4634 generic.go:334] "Generic (PLEG): container finished" podID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerID="bca0bf6bf996232f1a055e282b28a699731f6901d1cf16e22c802976c29efccc" exitCode=0
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.095903 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9","Type":"ContainerDied","Data":"bca0bf6bf996232f1a055e282b28a699731f6901d1cf16e22c802976c29efccc"}
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.102895 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4027fd6b-2391-4fbc-b503-9e698682b404","Type":"ContainerStarted","Data":"6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b"}
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.102943 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4027fd6b-2391-4fbc-b503-9e698682b404","Type":"ContainerStarted","Data":"e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36"}
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.102957 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4027fd6b-2391-4fbc-b503-9e698682b404","Type":"ContainerStarted","Data":"d5e86208bb9f34bfa624d4f19fc1e12bc5efbf3cd437d822997e3249ed299b8d"}
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.105944 4634 generic.go:334] "Generic (PLEG): container finished" podID="004847bf-1f65-49c4-b1a4-941f427ceab4" containerID="ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c" exitCode=0
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.105990 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"004847bf-1f65-49c4-b1a4-941f427ceab4","Type":"ContainerDied","Data":"ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c"}
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.106021 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"004847bf-1f65-49c4-b1a4-941f427ceab4","Type":"ContainerDied","Data":"50776e9922c57fab6ccd9517e1379fb1886a9b507e8fd2b42540d4d77bb54206"}
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.106043 4634 scope.go:117] "RemoveContainer" containerID="ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.106225 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.148273 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2edc859b-b252-4cd9-8d0e-299f5ef3a487" path="/var/lib/kubelet/pods/2edc859b-b252-4cd9-8d0e-299f5ef3a487/volumes"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.150075 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.150041886 podStartE2EDuration="2.150041886s" podCreationTimestamp="2025-09-29 14:07:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:02.134146419 +0000 UTC m=+1352.702874168" watchObservedRunningTime="2025-09-29 14:07:02.150041886 +0000 UTC m=+1352.718769635"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.150419 4634 scope.go:117] "RemoveContainer" containerID="ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c"
Sep 29 14:07:02 crc kubenswrapper[4634]: E0929 14:07:02.154908 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c\": container with ID starting with ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c not found: ID does not exist" containerID="ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.155003 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c"} err="failed to get container status \"ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c\": rpc error: code = NotFound desc = could not find container \"ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c\": container with ID starting with ba63a2e208d86ee425e1cff576da041b3790c6985b27548b57b4fcc1a09efc0c not found: ID does not exist"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.189521 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.220411 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.276035 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.301478 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-logs\") pod \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") "
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.301584 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-config-data\") pod \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") "
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.301738 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pml27\" (UniqueName: \"kubernetes.io/projected/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-kube-api-access-pml27\") pod \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") "
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.301830 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-combined-ca-bundle\") pod \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\" (UID: \"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9\") "
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.305775 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-logs" (OuterVolumeSpecName: "logs") pod "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" (UID: "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.315855 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 14:07:02 crc kubenswrapper[4634]: E0929 14:07:02.316754 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="004847bf-1f65-49c4-b1a4-941f427ceab4" containerName="nova-scheduler-scheduler"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.316777 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="004847bf-1f65-49c4-b1a4-941f427ceab4" containerName="nova-scheduler-scheduler"
Sep 29 14:07:02 crc kubenswrapper[4634]: E0929 14:07:02.316833 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-api"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.316842 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-api"
Sep 29 14:07:02 crc kubenswrapper[4634]: E0929 14:07:02.316863 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-log"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.316869 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-log"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.317185 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="004847bf-1f65-49c4-b1a4-941f427ceab4" containerName="nova-scheduler-scheduler"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.317215 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-log"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.317239 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" containerName="nova-api-api"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.321026 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-kube-api-access-pml27" (OuterVolumeSpecName: "kube-api-access-pml27") pod "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" (UID: "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9"). InnerVolumeSpecName "kube-api-access-pml27". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.324849 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.328903 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.354146 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.360027 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" (UID: "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.365211 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-config-data" (OuterVolumeSpecName: "config-data") pod "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" (UID: "f23fba9b-a0c4-42c1-80aa-a1039e40bdf9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.415463 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.415588 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-config-data\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.415615 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7wqr\" (UniqueName: \"kubernetes.io/projected/09857ee0-d835-4c75-9523-ef30395398c6-kube-api-access-v7wqr\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.415746 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-logs\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.415760 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.415774 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pml27\" (UniqueName: \"kubernetes.io/projected/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-kube-api-access-pml27\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.415786 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.518057 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-config-data\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.518500 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7wqr\" (UniqueName: \"kubernetes.io/projected/09857ee0-d835-4c75-9523-ef30395398c6-kube-api-access-v7wqr\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.519048 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.522979 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-config-data\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.524612 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.548170 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7wqr\" (UniqueName: \"kubernetes.io/projected/09857ee0-d835-4c75-9523-ef30395398c6-kube-api-access-v7wqr\") pod \"nova-scheduler-0\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " pod="openstack/nova-scheduler-0"
Sep 29 14:07:02 crc kubenswrapper[4634]: I0929 14:07:02.652923 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.123590 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.123846 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f23fba9b-a0c4-42c1-80aa-a1039e40bdf9","Type":"ContainerDied","Data":"89cf2eb1dfeb1b632da71f60d9355720946c7a30879067a5b2f3679c6af16f45"}
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.124117 4634 scope.go:117] "RemoveContainer" containerID="bca0bf6bf996232f1a055e282b28a699731f6901d1cf16e22c802976c29efccc"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.171696 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.182609 4634 scope.go:117] "RemoveContainer" containerID="cdcfc1552f70f88834fecf2ad41ed0e21899e4b0cb631a75f9e47eb595538d15"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.194162 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 29 14:07:03 crc kubenswrapper[4634]: W0929 14:07:03.196942 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09857ee0_d835_4c75_9523_ef30395398c6.slice/crio-0eb8d49874e6751ae9b80a1405fccc08c169692d5bb77426a2b2049d335f67ee WatchSource:0}: Error finding container 0eb8d49874e6751ae9b80a1405fccc08c169692d5bb77426a2b2049d335f67ee: Status 404 returned error can't find the container with id 0eb8d49874e6751ae9b80a1405fccc08c169692d5bb77426a2b2049d335f67ee
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.210056 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.223307 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.225220 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.229179 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.249636 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.345978 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-config-data\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.346129 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8w4tb\" (UniqueName: \"kubernetes.io/projected/b78c74f5-a9ef-47c7-94ec-17e2614fd854-kube-api-access-8w4tb\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.346184 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b78c74f5-a9ef-47c7-94ec-17e2614fd854-logs\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.346214 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.454764 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-config-data\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.455295 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8w4tb\" (UniqueName: \"kubernetes.io/projected/b78c74f5-a9ef-47c7-94ec-17e2614fd854-kube-api-access-8w4tb\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.455387 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b78c74f5-a9ef-47c7-94ec-17e2614fd854-logs\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.455438 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.456782 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b78c74f5-a9ef-47c7-94ec-17e2614fd854-logs\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.462530 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.462741 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-config-data\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.477723 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8w4tb\" (UniqueName: \"kubernetes.io/projected/b78c74f5-a9ef-47c7-94ec-17e2614fd854-kube-api-access-8w4tb\") pod \"nova-api-0\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " pod="openstack/nova-api-0"
Sep 29 14:07:03 crc kubenswrapper[4634]: I0929 14:07:03.556138 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 14:07:04 crc kubenswrapper[4634]: I0929 14:07:04.092178 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 14:07:04 crc kubenswrapper[4634]: W0929 14:07:04.093127 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb78c74f5_a9ef_47c7_94ec_17e2614fd854.slice/crio-3ac9879fa52488b709a29fd8f1fcf4e846349c1f3ec62ae2587fe8cbe57f549e WatchSource:0}: Error finding container 3ac9879fa52488b709a29fd8f1fcf4e846349c1f3ec62ae2587fe8cbe57f549e: Status 404 returned error can't find the container with id 3ac9879fa52488b709a29fd8f1fcf4e846349c1f3ec62ae2587fe8cbe57f549e
Sep 29 14:07:04 crc kubenswrapper[4634]: I0929 14:07:04.136943 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="004847bf-1f65-49c4-b1a4-941f427ceab4" path="/var/lib/kubelet/pods/004847bf-1f65-49c4-b1a4-941f427ceab4/volumes"
Sep 29 14:07:04 crc kubenswrapper[4634]: I0929 14:07:04.137892 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f23fba9b-a0c4-42c1-80aa-a1039e40bdf9" path="/var/lib/kubelet/pods/f23fba9b-a0c4-42c1-80aa-a1039e40bdf9/volumes"
Sep 29 14:07:04 crc kubenswrapper[4634]: I0929 14:07:04.185483 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"09857ee0-d835-4c75-9523-ef30395398c6","Type":"ContainerStarted","Data":"e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3"}
Sep 29 14:07:04 crc kubenswrapper[4634]: I0929 14:07:04.185551 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"09857ee0-d835-4c75-9523-ef30395398c6","Type":"ContainerStarted","Data":"0eb8d49874e6751ae9b80a1405fccc08c169692d5bb77426a2b2049d335f67ee"}
Sep 29 14:07:04 crc kubenswrapper[4634]: I0929 14:07:04.207485 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.207465731 podStartE2EDuration="2.207465731s" podCreationTimestamp="2025-09-29 14:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:04.20666238 +0000 UTC m=+1354.775390129" watchObservedRunningTime="2025-09-29 14:07:04.207465731 +0000 UTC m=+1354.776193480"
Sep 29 14:07:04 crc kubenswrapper[4634]: I0929 14:07:04.215336 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b78c74f5-a9ef-47c7-94ec-17e2614fd854","Type":"ContainerStarted","Data":"3ac9879fa52488b709a29fd8f1fcf4e846349c1f3ec62ae2587fe8cbe57f549e"}
Sep 29 14:07:05 crc kubenswrapper[4634]: I0929 14:07:05.242340 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b78c74f5-a9ef-47c7-94ec-17e2614fd854","Type":"ContainerStarted","Data":"8b0a6b390e2ead94abd83ec76fe372c3f61519f6438079967e67d14dd5c12609"}
Sep 29 14:07:05 crc kubenswrapper[4634]: I0929 14:07:05.243012 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b78c74f5-a9ef-47c7-94ec-17e2614fd854","Type":"ContainerStarted","Data":"c7380e058ca02b21cb710ab03d339dec727ed948084e6550cf4c0cb00ca5ba13"}
Sep 29 14:07:05 crc kubenswrapper[4634]: I0929 14:07:05.280051 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.280033794 podStartE2EDuration="2.280033794s" podCreationTimestamp="2025-09-29 14:07:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:05.275403763 +0000 UTC m=+1355.844131502" watchObservedRunningTime="2025-09-29 14:07:05.280033794 +0000 UTC m=+1355.848761543"
Sep 29 14:07:05 crc kubenswrapper[4634]: I0929 14:07:05.516692 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 14:07:05 crc kubenswrapper[4634]: I0929 14:07:05.518010 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 14:07:06 crc kubenswrapper[4634]: I0929 14:07:06.583852 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 14:07:06 crc kubenswrapper[4634]: I0929 14:07:06.584551 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="02c51435-cae1-4758-a27a-6e461be7161b" containerName="kube-state-metrics" containerID="cri-o://f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03" gracePeriod=30
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.260777 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.261390 4634 generic.go:334] "Generic (PLEG): container finished" podID="02c51435-cae1-4758-a27a-6e461be7161b" containerID="f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03" exitCode=2
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.261462 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"02c51435-cae1-4758-a27a-6e461be7161b","Type":"ContainerDied","Data":"f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03"}
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.261494 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"02c51435-cae1-4758-a27a-6e461be7161b","Type":"ContainerDied","Data":"1e01abd187383088067c5df28335897b611ef8a77d3cdc5939ca90ddd3ad995e"}
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.261515 4634 scope.go:117] "RemoveContainer" containerID="f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03"
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.298737 4634 scope.go:117] "RemoveContainer" containerID="f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03"
Sep 29 14:07:07 crc kubenswrapper[4634]: E0929 14:07:07.299302 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03\": container with ID starting with f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03 not found: ID does not exist" containerID="f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03"
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.299335 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03"} err="failed to get container status \"f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03\": rpc error: code = NotFound desc = could not find container \"f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03\": container with ID starting with f5eec0c40f5f54f166ce916ee1840cc925cafbdf8ed6efa9515f37bc908c8b03 not found: ID does not exist"
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.380368 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9ffh\" (UniqueName: \"kubernetes.io/projected/02c51435-cae1-4758-a27a-6e461be7161b-kube-api-access-g9ffh\") pod \"02c51435-cae1-4758-a27a-6e461be7161b\" (UID: \"02c51435-cae1-4758-a27a-6e461be7161b\") "
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.386178 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02c51435-cae1-4758-a27a-6e461be7161b-kube-api-access-g9ffh" (OuterVolumeSpecName: "kube-api-access-g9ffh") pod "02c51435-cae1-4758-a27a-6e461be7161b" (UID: "02c51435-cae1-4758-a27a-6e461be7161b"). InnerVolumeSpecName "kube-api-access-g9ffh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.482480 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9ffh\" (UniqueName: \"kubernetes.io/projected/02c51435-cae1-4758-a27a-6e461be7161b-kube-api-access-g9ffh\") on node \"crc\" DevicePath \"\""
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.513366 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Sep 29 14:07:07 crc kubenswrapper[4634]: I0929 14:07:07.653380 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.274136 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.304782 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.314425 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.331835 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 14:07:08 crc kubenswrapper[4634]: E0929 14:07:08.332622 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02c51435-cae1-4758-a27a-6e461be7161b" containerName="kube-state-metrics"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.332698 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="02c51435-cae1-4758-a27a-6e461be7161b" containerName="kube-state-metrics"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.333212 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="02c51435-cae1-4758-a27a-6e461be7161b" containerName="kube-state-metrics"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.333971 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.337813 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.337963 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.356863 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.505430 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.505500 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbn8r\" (UniqueName: \"kubernetes.io/projected/d13e0663-c00d-4276-be1d-fc570182e28a-kube-api-access-sbn8r\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.505667 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.505814 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.607970 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.608037 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbn8r\" (UniqueName: \"kubernetes.io/projected/d13e0663-c00d-4276-be1d-fc570182e28a-kube-api-access-sbn8r\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.608087 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.608139 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.614203 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.616116 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.619060 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/d13e0663-c00d-4276-be1d-fc570182e28a-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.638256 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbn8r\" (UniqueName: \"kubernetes.io/projected/d13e0663-c00d-4276-be1d-fc570182e28a-kube-api-access-sbn8r\") pod \"kube-state-metrics-0\" (UID: \"d13e0663-c00d-4276-be1d-fc570182e28a\") " pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.661639 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.824826 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.825516 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-central-agent" containerID="cri-o://8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2" gracePeriod=30
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.825559 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="sg-core" containerID="cri-o://e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d" gracePeriod=30
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.825650 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-notification-agent" containerID="cri-o://fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a" gracePeriod=30
Sep 29 14:07:08 crc kubenswrapper[4634]: I0929 14:07:08.825722 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="proxy-httpd" containerID="cri-o://47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c" gracePeriod=30
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.264693 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.285531 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d13e0663-c00d-4276-be1d-fc570182e28a","Type":"ContainerStarted","Data":"c30080d95e3a9093619e8d0ee11206045fbe0352279d427204d8e7b19bba6914"}
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.289542 4634 generic.go:334] "Generic (PLEG): container finished" podID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerID="47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c" exitCode=0
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.289608 4634 generic.go:334] "Generic (PLEG): container finished" podID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerID="e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d" exitCode=2
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.289632 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerDied","Data":"47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c"}
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.289664 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerDied","Data":"e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d"}
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.800037 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mhpvz"]
Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.802554 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.825561 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mhpvz"] Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.969309 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-catalog-content\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.970134 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-utilities\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:09 crc kubenswrapper[4634]: I0929 14:07:09.970379 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbnp8\" (UniqueName: \"kubernetes.io/projected/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-kube-api-access-nbnp8\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.072497 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-catalog-content\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.072565 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-utilities\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.072626 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbnp8\" (UniqueName: \"kubernetes.io/projected/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-kube-api-access-nbnp8\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.073680 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-utilities\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.073982 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-catalog-content\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.112888 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nbnp8\" (UniqueName: \"kubernetes.io/projected/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-kube-api-access-nbnp8\") pod \"redhat-operators-mhpvz\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.128607 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02c51435-cae1-4758-a27a-6e461be7161b" path="/var/lib/kubelet/pods/02c51435-cae1-4758-a27a-6e461be7161b/volumes" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.184616 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.306200 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d13e0663-c00d-4276-be1d-fc570182e28a","Type":"ContainerStarted","Data":"2242b3d8f84c7d86f6fa4cd2119641a1defc81dc46a3cc3dea727b8889277624"} Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.306713 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.322864 4634 generic.go:334] "Generic (PLEG): container finished" podID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerID="8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2" exitCode=0 Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.323271 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerDied","Data":"8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2"} Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.356463 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.993934898 podStartE2EDuration="2.356432509s" podCreationTimestamp="2025-09-29 14:07:08 +0000 UTC" firstStartedPulling="2025-09-29 14:07:09.261830139 +0000 UTC m=+1359.830557878" lastFinishedPulling="2025-09-29 14:07:09.62432774 +0000 UTC m=+1360.193055489" observedRunningTime="2025-09-29 14:07:10.351827249 +0000 UTC m=+1360.920555008" watchObservedRunningTime="2025-09-29 14:07:10.356432509 +0000 UTC m=+1360.925160258" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.512884 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.513317 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 14:07:10 crc kubenswrapper[4634]: I0929 14:07:10.723045 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mhpvz"] Sep 29 14:07:10 crc kubenswrapper[4634]: W0929 14:07:10.730190 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc3f0c2b_5ce6_4b7a_9094_9577887f53cd.slice/crio-9c7122489e4ef78c12c2005fa0a8430739e0ff73943de51fcc0319c053038540 WatchSource:0}: Error finding container 9c7122489e4ef78c12c2005fa0a8430739e0ff73943de51fcc0319c053038540: Status 404 returned error can't find the container with id 9c7122489e4ef78c12c2005fa0a8430739e0ff73943de51fcc0319c053038540 Sep 29 14:07:11 crc kubenswrapper[4634]: I0929 14:07:11.335031 4634 generic.go:334] "Generic (PLEG): container finished" 
podID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerID="c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3" exitCode=0 Sep 29 14:07:11 crc kubenswrapper[4634]: I0929 14:07:11.335184 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhpvz" event={"ID":"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd","Type":"ContainerDied","Data":"c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3"} Sep 29 14:07:11 crc kubenswrapper[4634]: I0929 14:07:11.335571 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhpvz" event={"ID":"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd","Type":"ContainerStarted","Data":"9c7122489e4ef78c12c2005fa0a8430739e0ff73943de51fcc0319c053038540"} Sep 29 14:07:11 crc kubenswrapper[4634]: I0929 14:07:11.529339 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:07:11 crc kubenswrapper[4634]: I0929 14:07:11.529467 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.083879 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.236298 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-config-data\") pod \"956b6f4a-f34e-4ab8-a841-d997925e73a4\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.236468 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6262\" (UniqueName: \"kubernetes.io/projected/956b6f4a-f34e-4ab8-a841-d997925e73a4-kube-api-access-b6262\") pod \"956b6f4a-f34e-4ab8-a841-d997925e73a4\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.236528 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-scripts\") pod \"956b6f4a-f34e-4ab8-a841-d997925e73a4\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.236631 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-run-httpd\") pod \"956b6f4a-f34e-4ab8-a841-d997925e73a4\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.236663 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-combined-ca-bundle\") pod \"956b6f4a-f34e-4ab8-a841-d997925e73a4\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.236852 4634 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-sg-core-conf-yaml\") pod \"956b6f4a-f34e-4ab8-a841-d997925e73a4\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.236968 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-log-httpd\") pod \"956b6f4a-f34e-4ab8-a841-d997925e73a4\" (UID: \"956b6f4a-f34e-4ab8-a841-d997925e73a4\") " Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.242549 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "956b6f4a-f34e-4ab8-a841-d997925e73a4" (UID: "956b6f4a-f34e-4ab8-a841-d997925e73a4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.243776 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "956b6f4a-f34e-4ab8-a841-d997925e73a4" (UID: "956b6f4a-f34e-4ab8-a841-d997925e73a4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.246052 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.246110 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/956b6f4a-f34e-4ab8-a841-d997925e73a4-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.263241 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/956b6f4a-f34e-4ab8-a841-d997925e73a4-kube-api-access-b6262" (OuterVolumeSpecName: "kube-api-access-b6262") pod "956b6f4a-f34e-4ab8-a841-d997925e73a4" (UID: "956b6f4a-f34e-4ab8-a841-d997925e73a4"). InnerVolumeSpecName "kube-api-access-b6262". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.277507 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-scripts" (OuterVolumeSpecName: "scripts") pod "956b6f4a-f34e-4ab8-a841-d997925e73a4" (UID: "956b6f4a-f34e-4ab8-a841-d997925e73a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.284436 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "956b6f4a-f34e-4ab8-a841-d997925e73a4" (UID: "956b6f4a-f34e-4ab8-a841-d997925e73a4"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.339328 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "956b6f4a-f34e-4ab8-a841-d997925e73a4" (UID: "956b6f4a-f34e-4ab8-a841-d997925e73a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.355451 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6262\" (UniqueName: \"kubernetes.io/projected/956b6f4a-f34e-4ab8-a841-d997925e73a4-kube-api-access-b6262\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.355484 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.355496 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.355506 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.363924 4634 generic.go:334] "Generic (PLEG): container finished" podID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerID="fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a" exitCode=0 Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.363991 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerDied","Data":"fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a"} Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.364031 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"956b6f4a-f34e-4ab8-a841-d997925e73a4","Type":"ContainerDied","Data":"8aaef87db4d0f97947fd4d5a23d2e94e14d0bec93c8ce9296ac79a33f3073361"} Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.364056 4634 scope.go:117] "RemoveContainer" containerID="47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.364383 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.413718 4634 scope.go:117] "RemoveContainer" containerID="e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.439378 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-config-data" (OuterVolumeSpecName: "config-data") pod "956b6f4a-f34e-4ab8-a841-d997925e73a4" (UID: "956b6f4a-f34e-4ab8-a841-d997925e73a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.457152 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/956b6f4a-f34e-4ab8-a841-d997925e73a4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.483931 4634 scope.go:117] "RemoveContainer" containerID="fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.517983 4634 scope.go:117] "RemoveContainer" containerID="8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.579384 4634 scope.go:117] "RemoveContainer" containerID="47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c" Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.579890 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c\": container with ID starting with 47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c not found: ID does not exist" containerID="47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.579929 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c"} err="failed to get container status \"47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c\": rpc error: code = NotFound desc = could not find container \"47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c\": container with ID starting with 47ca44fe906ae31f25b5f6cc300560871ebc2f09222139a9661a7ac55d0fcd6c not found: ID does not exist" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.579958 4634 scope.go:117] "RemoveContainer" containerID="e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d" Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.580211 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d\": container with ID starting with e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d not found: ID does not exist" containerID="e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.580237 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d"} err="failed to get container status \"e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d\": rpc error: code = NotFound desc = could not find container \"e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d\": container with ID starting with e7bd135a1d5f9fb2e2ab99a9558c207e5b8b7ec42f6b2b763c0d1c540636cf8d not found: ID does not exist" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.580252 4634 scope.go:117] "RemoveContainer" containerID="fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a" Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.580913 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a\": container with ID starting with fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a not found: ID does not exist" containerID="fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.580936 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a"} err="failed to get container status \"fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a\": rpc error: code = NotFound desc = could not find container \"fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a\": container with ID starting with fd8ee33a83688c79f443b73fe8b423840903fc28e6f6b0ff9db7999296e71e8a not found: ID does not exist" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.580949 4634 scope.go:117] "RemoveContainer" containerID="8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2" Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.581167 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2\": container with ID starting with 8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2 not found: ID does not exist" containerID="8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.581189 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2"} err="failed to get container status \"8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2\": rpc error: code = NotFound desc = could not find container \"8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2\": container with ID starting with 8bd1182348a58595a0af3d3d973beaae8f7cb8729f28e07d22290c94ff05e9a2 not found: ID does not exist" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.654249 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.701406 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.752166 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.768979 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.782743 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.783364 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-central-agent" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.783390 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-central-agent" Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.783437 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-notification-agent" Sep 29 14:07:12 crc 
kubenswrapper[4634]: I0929 14:07:12.783464 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-notification-agent" Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.783494 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="proxy-httpd" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.783503 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="proxy-httpd" Sep 29 14:07:12 crc kubenswrapper[4634]: E0929 14:07:12.783518 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="sg-core" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.783528 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="sg-core" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.783794 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-notification-agent" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.783828 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="ceilometer-central-agent" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.783843 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="proxy-httpd" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.783861 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" containerName="sg-core" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.790448 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.794772 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.795721 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.795890 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.796104 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.882748 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-log-httpd\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.882828 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-config-data\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.882877 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.883306 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-scripts\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.883378 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.883602 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.883671 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-run-httpd\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.883702 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klcds\" (UniqueName: 
\"kubernetes.io/projected/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-kube-api-access-klcds\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985663 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985772 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-scripts\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985795 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985836 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985859 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-run-httpd\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985877 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klcds\" (UniqueName: \"kubernetes.io/projected/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-kube-api-access-klcds\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985928 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-log-httpd\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.985969 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-config-data\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.986711 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-run-httpd\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.986998 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-log-httpd\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.991964 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.992058 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-scripts\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.993833 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:12 crc kubenswrapper[4634]: I0929 14:07:12.994321 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-config-data\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.001554 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.016189 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klcds\" (UniqueName: \"kubernetes.io/projected/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-kube-api-access-klcds\") pod \"ceilometer-0\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " pod="openstack/ceilometer-0" Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.134621 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.392953 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhpvz" event={"ID":"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd","Type":"ContainerStarted","Data":"502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911"} Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.445813 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.557474 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.557604 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:07:13 crc kubenswrapper[4634]: I0929 14:07:13.676886 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.122533 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="956b6f4a-f34e-4ab8-a841-d997925e73a4" path="/var/lib/kubelet/pods/956b6f4a-f34e-4ab8-a841-d997925e73a4/volumes" Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.396236 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.396759 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.396829 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.398144 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c310be9cfa8ed67485f93d39340fd3b9cbd0be1e3fbae3bd53e3014ebdb22b63"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.398210 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://c310be9cfa8ed67485f93d39340fd3b9cbd0be1e3fbae3bd53e3014ebdb22b63" gracePeriod=600 Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.424608 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerStarted","Data":"276474ad71cbe71d3b794e64fdd26de5c4b17cbcab5468b51a7b1c08d2a4da81"} Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.642740 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" 
containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:07:14 crc kubenswrapper[4634]: I0929 14:07:14.642743 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.198:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 14:07:15 crc kubenswrapper[4634]: I0929 14:07:15.480754 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerStarted","Data":"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d"} Sep 29 14:07:15 crc kubenswrapper[4634]: I0929 14:07:15.495485 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="c310be9cfa8ed67485f93d39340fd3b9cbd0be1e3fbae3bd53e3014ebdb22b63" exitCode=0 Sep 29 14:07:15 crc kubenswrapper[4634]: I0929 14:07:15.495536 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"c310be9cfa8ed67485f93d39340fd3b9cbd0be1e3fbae3bd53e3014ebdb22b63"} Sep 29 14:07:15 crc kubenswrapper[4634]: I0929 14:07:15.495569 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"} Sep 29 14:07:15 crc kubenswrapper[4634]: I0929 14:07:15.495605 4634 scope.go:117] "RemoveContainer" containerID="57b4d47644425468a03fbc283811a82747ba711e9f6742c5de405a2bc380e087" Sep 29 14:07:16 crc kubenswrapper[4634]: I0929 14:07:16.510726 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerStarted","Data":"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2"} Sep 29 14:07:16 crc kubenswrapper[4634]: I0929 14:07:16.511370 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerStarted","Data":"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5"} Sep 29 14:07:17 crc kubenswrapper[4634]: I0929 14:07:17.529444 4634 generic.go:334] "Generic (PLEG): container finished" podID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerID="502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911" exitCode=0 Sep 29 14:07:17 crc kubenswrapper[4634]: I0929 14:07:17.529532 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhpvz" event={"ID":"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd","Type":"ContainerDied","Data":"502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911"} Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.417054 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.556720 4634 generic.go:334] "Generic (PLEG): container finished" podID="a29454b6-89c5-496f-af78-230a3ace579f" containerID="f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a" exitCode=137 Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.556807 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.556848 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a29454b6-89c5-496f-af78-230a3ace579f","Type":"ContainerDied","Data":"f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a"} Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.557563 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a29454b6-89c5-496f-af78-230a3ace579f","Type":"ContainerDied","Data":"cdce7f969454a54baafa55c2f53ae41cbbc1022f0ed252bcf84269b685426ff0"} Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.557589 4634 scope.go:117] "RemoveContainer" containerID="f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.560295 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-config-data\") pod \"a29454b6-89c5-496f-af78-230a3ace579f\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.560495 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jlcpr\" (UniqueName: \"kubernetes.io/projected/a29454b6-89c5-496f-af78-230a3ace579f-kube-api-access-jlcpr\") pod \"a29454b6-89c5-496f-af78-230a3ace579f\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.560676 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-combined-ca-bundle\") pod \"a29454b6-89c5-496f-af78-230a3ace579f\" (UID: \"a29454b6-89c5-496f-af78-230a3ace579f\") " Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.574287 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29454b6-89c5-496f-af78-230a3ace579f-kube-api-access-jlcpr" (OuterVolumeSpecName: "kube-api-access-jlcpr") pod "a29454b6-89c5-496f-af78-230a3ace579f" (UID: "a29454b6-89c5-496f-af78-230a3ace579f"). InnerVolumeSpecName "kube-api-access-jlcpr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.575365 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerStarted","Data":"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4"} Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.576256 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.596630 4634 scope.go:117] "RemoveContainer" containerID="f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a" Sep 29 14:07:18 crc kubenswrapper[4634]: E0929 14:07:18.599879 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a\": container with ID starting with f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a not found: ID does not exist" containerID="f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.599955 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a"} err="failed to get container status \"f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a\": rpc error: code = NotFound desc = could not find container \"f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a\": container with ID starting with f5d92db8d495a8cde6f03f965f2d28434863ffa7181f17f91ba4f66e82bffa3a not found: ID does not exist" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.613463 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a29454b6-89c5-496f-af78-230a3ace579f" (UID: "a29454b6-89c5-496f-af78-230a3ace579f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.613872 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.4076520439999998 podStartE2EDuration="6.613834749s" podCreationTimestamp="2025-09-29 14:07:12 +0000 UTC" firstStartedPulling="2025-09-29 14:07:13.687874018 +0000 UTC m=+1364.256601767" lastFinishedPulling="2025-09-29 14:07:17.894056713 +0000 UTC m=+1368.462784472" observedRunningTime="2025-09-29 14:07:18.606107016 +0000 UTC m=+1369.174834765" watchObservedRunningTime="2025-09-29 14:07:18.613834749 +0000 UTC m=+1369.182562498" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.619766 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-config-data" (OuterVolumeSpecName: "config-data") pod "a29454b6-89c5-496f-af78-230a3ace579f" (UID: "a29454b6-89c5-496f-af78-230a3ace579f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.665868 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.665899 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29454b6-89c5-496f-af78-230a3ace579f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.665911 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jlcpr\" (UniqueName: \"kubernetes.io/projected/a29454b6-89c5-496f-af78-230a3ace579f-kube-api-access-jlcpr\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.682406 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.933131 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.957325 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.987599 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:07:18 crc kubenswrapper[4634]: E0929 14:07:18.988290 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29454b6-89c5-496f-af78-230a3ace579f" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.988314 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29454b6-89c5-496f-af78-230a3ace579f" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.988559 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29454b6-89c5-496f-af78-230a3ace579f" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.989553 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.993062 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.994138 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 29 14:07:18 crc kubenswrapper[4634]: I0929 14:07:18.996759 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.003564 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.081668 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqrvb\" (UniqueName: \"kubernetes.io/projected/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-kube-api-access-gqrvb\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.081748 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.081941 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.082192 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.082627 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.185217 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.185359 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqrvb\" (UniqueName: \"kubernetes.io/projected/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-kube-api-access-gqrvb\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " 
pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.185413 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.185510 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.185586 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.193886 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.194484 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.196388 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.206353 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqrvb\" (UniqueName: \"kubernetes.io/projected/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-kube-api-access-gqrvb\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.206409 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bb80ea7-a2f1-4eeb-9205-722fda8a48b2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.324120 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.605765 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhpvz" event={"ID":"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd","Type":"ContainerStarted","Data":"e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494"} Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.634265 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mhpvz" podStartSLOduration=3.772567667 podStartE2EDuration="10.634238154s" podCreationTimestamp="2025-09-29 14:07:09 +0000 UTC" firstStartedPulling="2025-09-29 14:07:11.33737226 +0000 UTC m=+1361.906100009" lastFinishedPulling="2025-09-29 14:07:18.199042747 +0000 UTC m=+1368.767770496" observedRunningTime="2025-09-29 14:07:19.630958588 +0000 UTC m=+1370.199686357" watchObservedRunningTime="2025-09-29 14:07:19.634238154 +0000 UTC m=+1370.202965903" Sep 29 14:07:19 crc kubenswrapper[4634]: I0929 14:07:19.933542 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.141033 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29454b6-89c5-496f-af78-230a3ace579f" path="/var/lib/kubelet/pods/a29454b6-89c5-496f-af78-230a3ace579f/volumes" Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.186524 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.186571 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.520752 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.526002 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.531122 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.619202 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2","Type":"ContainerStarted","Data":"d40c5aa286adddcbdc951503c0212d0f6d170457fe2427a6652a6b08ad020372"} Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.619273 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"6bb80ea7-a2f1-4eeb-9205-722fda8a48b2","Type":"ContainerStarted","Data":"9a89ae17f24a1bb703f2b29a04fafe1656d831ddb0350bd880809b5cb023b12d"} Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.642883 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 14:07:20 crc kubenswrapper[4634]: I0929 14:07:20.647756 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.647738038 podStartE2EDuration="2.647738038s" podCreationTimestamp="2025-09-29 14:07:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:20.643483156 +0000 
UTC m=+1371.212210905" watchObservedRunningTime="2025-09-29 14:07:20.647738038 +0000 UTC m=+1371.216465787" Sep 29 14:07:21 crc kubenswrapper[4634]: I0929 14:07:21.252857 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mhpvz" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server" probeResult="failure" output=< Sep 29 14:07:21 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:07:21 crc kubenswrapper[4634]: > Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.562658 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.563660 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.565542 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.565573 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.572865 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.574184 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.847716 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-5lnpd"] Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.849542 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.878952 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-5lnpd"] Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.905015 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.905167 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-config\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.905232 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89xkr\" (UniqueName: \"kubernetes.io/projected/8a51b64e-3f78-4240-8c8d-3744e65046f5-kube-api-access-89xkr\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.905276 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: 
\"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.905296 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:23 crc kubenswrapper[4634]: I0929 14:07:23.905314 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.007863 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.007920 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.007967 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.009073 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.009196 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-config\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.009264 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89xkr\" (UniqueName: \"kubernetes.io/projected/8a51b64e-3f78-4240-8c8d-3744e65046f5-kube-api-access-89xkr\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.010298 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: 
\"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.011235 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.011578 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.011787 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.012882 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-config\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.037567 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89xkr\" (UniqueName: \"kubernetes.io/projected/8a51b64e-3f78-4240-8c8d-3744e65046f5-kube-api-access-89xkr\") pod \"dnsmasq-dns-59cf4bdb65-5lnpd\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.182983 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.324831 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:24 crc kubenswrapper[4634]: I0929 14:07:24.785888 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-5lnpd"] Sep 29 14:07:25 crc kubenswrapper[4634]: I0929 14:07:25.671933 4634 generic.go:334] "Generic (PLEG): container finished" podID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerID="c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b" exitCode=0 Sep 29 14:07:25 crc kubenswrapper[4634]: I0929 14:07:25.671985 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" event={"ID":"8a51b64e-3f78-4240-8c8d-3744e65046f5","Type":"ContainerDied","Data":"c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b"} Sep 29 14:07:25 crc kubenswrapper[4634]: I0929 14:07:25.672019 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" event={"ID":"8a51b64e-3f78-4240-8c8d-3744e65046f5","Type":"ContainerStarted","Data":"7516b8a80f9deb388653a3663d0f0810866fb7b90c6fe36151c3635137a54e72"} Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.349062 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.349624 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-log" containerID="cri-o://c7380e058ca02b21cb710ab03d339dec727ed948084e6550cf4c0cb00ca5ba13" gracePeriod=30 Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.349753 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-api" containerID="cri-o://8b0a6b390e2ead94abd83ec76fe372c3f61519f6438079967e67d14dd5c12609" gracePeriod=30 Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.709909 4634 generic.go:334] "Generic (PLEG): container finished" podID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerID="c7380e058ca02b21cb710ab03d339dec727ed948084e6550cf4c0cb00ca5ba13" exitCode=143 Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.710003 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b78c74f5-a9ef-47c7-94ec-17e2614fd854","Type":"ContainerDied","Data":"c7380e058ca02b21cb710ab03d339dec727ed948084e6550cf4c0cb00ca5ba13"} Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.724855 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" event={"ID":"8a51b64e-3f78-4240-8c8d-3744e65046f5","Type":"ContainerStarted","Data":"5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95"} Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.725161 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:27 crc kubenswrapper[4634]: I0929 14:07:27.771128 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" podStartSLOduration=4.771106205 podStartE2EDuration="4.771106205s" podCreationTimestamp="2025-09-29 14:07:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:27.766951286 +0000 UTC m=+1378.335679035" watchObservedRunningTime="2025-09-29 14:07:27.771106205 +0000 UTC m=+1378.339833954" Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.251460 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.254202 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-central-agent" containerID="cri-o://df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d" gracePeriod=30 Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.254320 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="sg-core" containerID="cri-o://63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2" gracePeriod=30 Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.254283 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="proxy-httpd" containerID="cri-o://78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4" gracePeriod=30 Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.254370 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-notification-agent" containerID="cri-o://19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5" gracePeriod=30 Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.263998 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.201:3000/\": EOF" Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.738561 4634 generic.go:334] "Generic (PLEG): container finished" podID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerID="78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4" exitCode=0 Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.738598 4634 generic.go:334] "Generic (PLEG): container finished" podID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerID="63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2" exitCode=2 Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.738646 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerDied","Data":"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4"} Sep 29 14:07:28 crc kubenswrapper[4634]: I0929 14:07:28.738711 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerDied","Data":"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2"} Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.324579 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.376611 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.486890 4634 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.578804 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-scripts\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.578985 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-sg-core-conf-yaml\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.579008 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-config-data\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.579033 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-ceilometer-tls-certs\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.579126 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-log-httpd\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.579186 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klcds\" (UniqueName: \"kubernetes.io/projected/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-kube-api-access-klcds\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.579204 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-combined-ca-bundle\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.579252 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-run-httpd\") pod \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\" (UID: \"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb\") " Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.580164 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.584174 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.615047 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-kube-api-access-klcds" (OuterVolumeSpecName: "kube-api-access-klcds") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "kube-api-access-klcds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.621722 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-scripts" (OuterVolumeSpecName: "scripts") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.692763 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.693247 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klcds\" (UniqueName: \"kubernetes.io/projected/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-kube-api-access-klcds\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.693262 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.693271 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.725191 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.732740 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.756904 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.767540 4634 generic.go:334] "Generic (PLEG): container finished" podID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerID="19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5" exitCode=0 Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.767581 4634 generic.go:334] "Generic (PLEG): container finished" podID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerID="df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d" exitCode=0 Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.768903 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.769112 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerDied","Data":"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5"} Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.769184 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerDied","Data":"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d"} Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.769206 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb","Type":"ContainerDied","Data":"276474ad71cbe71d3b794e64fdd26de5c4b17cbcab5468b51a7b1c08d2a4da81"} Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.769230 4634 scope.go:117] "RemoveContainer" containerID="78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.797205 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.797234 4634 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.797272 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.808513 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.818149 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-config-data" (OuterVolumeSpecName: "config-data") pod "b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" (UID: 
"b594b5a8-cf40-4990-9cdf-0b48ac41a8eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.869687 4634 scope.go:117] "RemoveContainer" containerID="63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.902920 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.926530 4634 scope.go:117] "RemoveContainer" containerID="19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5" Sep 29 14:07:29 crc kubenswrapper[4634]: I0929 14:07:29.971707 4634 scope.go:117] "RemoveContainer" containerID="df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.020886 4634 scope.go:117] "RemoveContainer" containerID="78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4" Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.039131 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4\": container with ID starting with 78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4 not found: ID does not exist" containerID="78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.039183 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4"} err="failed to get container status \"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4\": rpc error: code = NotFound desc = could not find container \"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4\": container with ID starting with 78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4 not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.039264 4634 scope.go:117] "RemoveContainer" containerID="63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2" Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.045249 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2\": container with ID starting with 63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2 not found: ID does not exist" containerID="63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.045296 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2"} err="failed to get container status \"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2\": rpc error: code = NotFound desc = could not find container \"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2\": container with ID starting with 63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2 not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.045326 4634 scope.go:117] "RemoveContainer" 
containerID="19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5" Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.049162 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5\": container with ID starting with 19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5 not found: ID does not exist" containerID="19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.049192 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5"} err="failed to get container status \"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5\": rpc error: code = NotFound desc = could not find container \"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5\": container with ID starting with 19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5 not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.049211 4634 scope.go:117] "RemoveContainer" containerID="df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d" Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.049827 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d\": container with ID starting with df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d not found: ID does not exist" containerID="df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.049846 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d"} err="failed to get container status \"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d\": rpc error: code = NotFound desc = could not find container \"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d\": container with ID starting with df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.049858 4634 scope.go:117] "RemoveContainer" containerID="78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.051562 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4"} err="failed to get container status \"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4\": rpc error: code = NotFound desc = could not find container \"78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4\": container with ID starting with 78aa7161d2e810a63345223b03d5144f8f30a91b082052a0ded9c4cd79c6cba4 not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.051581 4634 scope.go:117] "RemoveContainer" containerID="63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.054233 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2"} err="failed to get container status \"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2\": rpc error: code = NotFound desc = could not find container \"63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2\": container with ID starting with 63159fef8b15bc5eb94b61b841e599fec4e433ab1f0ac5bbd501d84697ee1cf2 not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.054287 4634 scope.go:117] "RemoveContainer" containerID="19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.055133 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5"} err="failed to get container status \"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5\": rpc error: code = NotFound desc = could not find container \"19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5\": container with ID starting with 19235fcd7db58498733b78af3db43735731e8a9c827c15953e41cd0d93f827f5 not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.055171 4634 scope.go:117] "RemoveContainer" containerID="df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.057716 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d"} err="failed to get container status \"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d\": rpc error: code = NotFound desc = could not find container \"df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d\": container with ID starting with df1dd4e385d3886831ae075b184188254e4333558da3184864a614d36823a82d not found: ID does not exist" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.086407 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-dvmgw"] Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.087323 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-central-agent" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.087339 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-central-agent" Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.087387 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-notification-agent" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.087396 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-notification-agent" Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.087410 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="proxy-httpd" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.087418 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="proxy-httpd" Sep 29 14:07:30 crc kubenswrapper[4634]: E0929 14:07:30.087591 4634 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="sg-core" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.087602 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="sg-core" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.087951 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="sg-core" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.088000 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-central-agent" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.088015 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="ceilometer-notification-agent" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.088028 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" containerName="proxy-httpd" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.092942 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.096076 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.097505 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.156985 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvmgw"] Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.168185 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.175105 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.192470 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.195528 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.200133 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.200358 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.201356 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.208848 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcm7q\" (UniqueName: \"kubernetes.io/projected/e00a014d-19be-46d4-91e0-0b9a34160195-kube-api-access-jcm7q\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.208912 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-config-data\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.208968 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.209028 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-scripts\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.226585 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.310925 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzmzb\" (UniqueName: \"kubernetes.io/projected/67123c57-e7d0-4982-9aa8-ab2e00905535-kube-api-access-kzmzb\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.310980 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcm7q\" (UniqueName: \"kubernetes.io/projected/e00a014d-19be-46d4-91e0-0b9a34160195-kube-api-access-jcm7q\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311024 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-config-data\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc 
kubenswrapper[4634]: I0929 14:07:30.311041 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-scripts\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311068 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311133 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311175 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311199 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-config-data\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311233 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-scripts\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311380 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-log-httpd\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311499 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.311595 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-run-httpd\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.315981 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-scripts\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.318921 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-config-data\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.323835 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.329835 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcm7q\" (UniqueName: \"kubernetes.io/projected/e00a014d-19be-46d4-91e0-0b9a34160195-kube-api-access-jcm7q\") pod \"nova-cell1-cell-mapping-dvmgw\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.414704 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.414793 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-run-httpd\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.415278 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-run-httpd\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.415348 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzmzb\" (UniqueName: \"kubernetes.io/projected/67123c57-e7d0-4982-9aa8-ab2e00905535-kube-api-access-kzmzb\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.415418 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-scripts\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.415446 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.415532 4634 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.415661 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-config-data\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.416113 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-log-httpd\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.416399 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-log-httpd\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.418823 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.419708 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.420477 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.421121 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-config-data\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.436060 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.436389 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-scripts\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.451879 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzmzb\" (UniqueName: \"kubernetes.io/projected/67123c57-e7d0-4982-9aa8-ab2e00905535-kube-api-access-kzmzb\") pod \"ceilometer-0\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.527243 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.795657 4634 generic.go:334] "Generic (PLEG): container finished" podID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerID="8b0a6b390e2ead94abd83ec76fe372c3f61519f6438079967e67d14dd5c12609" exitCode=0 Sep 29 14:07:30 crc kubenswrapper[4634]: I0929 14:07:30.796502 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b78c74f5-a9ef-47c7-94ec-17e2614fd854","Type":"ContainerDied","Data":"8b0a6b390e2ead94abd83ec76fe372c3f61519f6438079967e67d14dd5c12609"} Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.156734 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvmgw"] Sep 29 14:07:31 crc kubenswrapper[4634]: W0929 14:07:31.168721 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode00a014d_19be_46d4_91e0_0b9a34160195.slice/crio-853e40ab18799d0d92ef382d81618a3a9ff1f8d3b7e5eb642526ef0a6b28e60d WatchSource:0}: Error finding container 853e40ab18799d0d92ef382d81618a3a9ff1f8d3b7e5eb642526ef0a6b28e60d: Status 404 returned error can't find the container with id 853e40ab18799d0d92ef382d81618a3a9ff1f8d3b7e5eb642526ef0a6b28e60d Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.261444 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mhpvz" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server" probeResult="failure" output=< Sep 29 14:07:31 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:07:31 crc kubenswrapper[4634]: > Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.261579 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.288790 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.446191 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b78c74f5-a9ef-47c7-94ec-17e2614fd854-logs\") pod \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.446374 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8w4tb\" (UniqueName: \"kubernetes.io/projected/b78c74f5-a9ef-47c7-94ec-17e2614fd854-kube-api-access-8w4tb\") pod \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.446501 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-combined-ca-bundle\") pod \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.446734 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-config-data\") pod \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\" (UID: \"b78c74f5-a9ef-47c7-94ec-17e2614fd854\") " Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.467816 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b78c74f5-a9ef-47c7-94ec-17e2614fd854-logs" (OuterVolumeSpecName: "logs") pod "b78c74f5-a9ef-47c7-94ec-17e2614fd854" (UID: "b78c74f5-a9ef-47c7-94ec-17e2614fd854"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.479266 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b78c74f5-a9ef-47c7-94ec-17e2614fd854-kube-api-access-8w4tb" (OuterVolumeSpecName: "kube-api-access-8w4tb") pod "b78c74f5-a9ef-47c7-94ec-17e2614fd854" (UID: "b78c74f5-a9ef-47c7-94ec-17e2614fd854"). InnerVolumeSpecName "kube-api-access-8w4tb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.555491 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b78c74f5-a9ef-47c7-94ec-17e2614fd854-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.555738 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8w4tb\" (UniqueName: \"kubernetes.io/projected/b78c74f5-a9ef-47c7-94ec-17e2614fd854-kube-api-access-8w4tb\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.558502 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b78c74f5-a9ef-47c7-94ec-17e2614fd854" (UID: "b78c74f5-a9ef-47c7-94ec-17e2614fd854"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.572310 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.583388 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-config-data" (OuterVolumeSpecName: "config-data") pod "b78c74f5-a9ef-47c7-94ec-17e2614fd854" (UID: "b78c74f5-a9ef-47c7-94ec-17e2614fd854"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.658093 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.658140 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b78c74f5-a9ef-47c7-94ec-17e2614fd854-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.836941 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvmgw" event={"ID":"e00a014d-19be-46d4-91e0-0b9a34160195","Type":"ContainerStarted","Data":"e51de5759532a1462c272d8c9a3f4b7a790fc30ef4279d18b24024b7218795e8"} Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.837002 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvmgw" event={"ID":"e00a014d-19be-46d4-91e0-0b9a34160195","Type":"ContainerStarted","Data":"853e40ab18799d0d92ef382d81618a3a9ff1f8d3b7e5eb642526ef0a6b28e60d"} Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.841133 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerStarted","Data":"b32901a2edb3b859c8e26de3c87ec7599542184e8d40f26985a868a8f4429048"} Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.843864 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"b78c74f5-a9ef-47c7-94ec-17e2614fd854","Type":"ContainerDied","Data":"3ac9879fa52488b709a29fd8f1fcf4e846349c1f3ec62ae2587fe8cbe57f549e"} Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.843933 4634 scope.go:117] "RemoveContainer" containerID="8b0a6b390e2ead94abd83ec76fe372c3f61519f6438079967e67d14dd5c12609" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.844145 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.865695 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-dvmgw" podStartSLOduration=1.865672545 podStartE2EDuration="1.865672545s" podCreationTimestamp="2025-09-29 14:07:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:31.85898371 +0000 UTC m=+1382.427711459" watchObservedRunningTime="2025-09-29 14:07:31.865672545 +0000 UTC m=+1382.434400294" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.879756 4634 scope.go:117] "RemoveContainer" containerID="c7380e058ca02b21cb710ab03d339dec727ed948084e6550cf4c0cb00ca5ba13" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.907886 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.927588 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.943557 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:31 crc kubenswrapper[4634]: E0929 14:07:31.944165 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-log" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.944184 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-log" Sep 29 14:07:31 crc kubenswrapper[4634]: E0929 14:07:31.944219 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-api" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.944225 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-api" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.944420 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-log" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.944438 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" containerName="nova-api-api" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.945914 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.952622 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.953771 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.953905 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 14:07:31 crc kubenswrapper[4634]: I0929 14:07:31.980750 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.071969 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c792bb76-dbea-464c-916e-335db43f086f-logs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.072035 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-config-data\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.072062 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.072164 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l9m9\" (UniqueName: \"kubernetes.io/projected/c792bb76-dbea-464c-916e-335db43f086f-kube-api-access-8l9m9\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.072212 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.072258 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-public-tls-certs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.129573 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b594b5a8-cf40-4990-9cdf-0b48ac41a8eb" path="/var/lib/kubelet/pods/b594b5a8-cf40-4990-9cdf-0b48ac41a8eb/volumes" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.130996 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b78c74f5-a9ef-47c7-94ec-17e2614fd854" path="/var/lib/kubelet/pods/b78c74f5-a9ef-47c7-94ec-17e2614fd854/volumes" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.173606 4634 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-public-tls-certs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.173654 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c792bb76-dbea-464c-916e-335db43f086f-logs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.173686 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-config-data\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.174915 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c792bb76-dbea-464c-916e-335db43f086f-logs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.175466 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.176054 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l9m9\" (UniqueName: \"kubernetes.io/projected/c792bb76-dbea-464c-916e-335db43f086f-kube-api-access-8l9m9\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.176201 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.178309 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-public-tls-certs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.179886 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-config-data\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.180386 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.181191 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.202550 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l9m9\" (UniqueName: \"kubernetes.io/projected/c792bb76-dbea-464c-916e-335db43f086f-kube-api-access-8l9m9\") pod \"nova-api-0\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " pod="openstack/nova-api-0" Sep 29 14:07:32 crc kubenswrapper[4634]: I0929 14:07:32.287236 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:33 crc kubenswrapper[4634]: I0929 14:07:32.855882 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerStarted","Data":"84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9"} Sep 29 14:07:33 crc kubenswrapper[4634]: I0929 14:07:33.534284 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:33 crc kubenswrapper[4634]: I0929 14:07:33.875750 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerStarted","Data":"bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043"} Sep 29 14:07:33 crc kubenswrapper[4634]: I0929 14:07:33.878563 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c792bb76-dbea-464c-916e-335db43f086f","Type":"ContainerStarted","Data":"0d95e4a959c0f98984fc268d457226e4566f94507a6225b34c99bb019c39bdf3"} Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.184275 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.257949 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-bcgbg"] Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.258637 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" podUID="b390039e-04d4-492f-b211-091f63ab658b" containerName="dnsmasq-dns" containerID="cri-o://c5f6148230cfbaeb5f7a734e7f04c62a5daa46cddddaf9a16d2c517a6b04c2a3" gracePeriod=10 Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.967358 4634 generic.go:334] "Generic (PLEG): container finished" podID="b390039e-04d4-492f-b211-091f63ab658b" containerID="c5f6148230cfbaeb5f7a734e7f04c62a5daa46cddddaf9a16d2c517a6b04c2a3" exitCode=0 Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.968425 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" event={"ID":"b390039e-04d4-492f-b211-091f63ab658b","Type":"ContainerDied","Data":"c5f6148230cfbaeb5f7a734e7f04c62a5daa46cddddaf9a16d2c517a6b04c2a3"} Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.968509 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" event={"ID":"b390039e-04d4-492f-b211-091f63ab658b","Type":"ContainerDied","Data":"ebebcadbd8fecb9f3eb5089d0e56c19d04f74dbf4e5a025b227382b5abc7e128"} Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.968573 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebebcadbd8fecb9f3eb5089d0e56c19d04f74dbf4e5a025b227382b5abc7e128" Sep 29 
14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.991907 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerStarted","Data":"ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52"} Sep 29 14:07:34 crc kubenswrapper[4634]: I0929 14:07:34.992976 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:34.998840 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c792bb76-dbea-464c-916e-335db43f086f","Type":"ContainerStarted","Data":"bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082"} Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:34.998902 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c792bb76-dbea-464c-916e-335db43f086f","Type":"ContainerStarted","Data":"d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5"} Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.082726 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftf2m\" (UniqueName: \"kubernetes.io/projected/b390039e-04d4-492f-b211-091f63ab658b-kube-api-access-ftf2m\") pod \"b390039e-04d4-492f-b211-091f63ab658b\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.082869 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-sb\") pod \"b390039e-04d4-492f-b211-091f63ab658b\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.082983 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-nb\") pod \"b390039e-04d4-492f-b211-091f63ab658b\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.083296 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-svc\") pod \"b390039e-04d4-492f-b211-091f63ab658b\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.083342 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-swift-storage-0\") pod \"b390039e-04d4-492f-b211-091f63ab658b\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.083398 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-config\") pod \"b390039e-04d4-492f-b211-091f63ab658b\" (UID: \"b390039e-04d4-492f-b211-091f63ab658b\") " Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.084765 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.084742509 podStartE2EDuration="4.084742509s" podCreationTimestamp="2025-09-29 14:07:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:35.043865458 +0000 UTC m=+1385.612593207" watchObservedRunningTime="2025-09-29 14:07:35.084742509 +0000 UTC m=+1385.653470258" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.140825 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b390039e-04d4-492f-b211-091f63ab658b-kube-api-access-ftf2m" (OuterVolumeSpecName: "kube-api-access-ftf2m") pod "b390039e-04d4-492f-b211-091f63ab658b" (UID: "b390039e-04d4-492f-b211-091f63ab658b"). InnerVolumeSpecName "kube-api-access-ftf2m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.197690 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftf2m\" (UniqueName: \"kubernetes.io/projected/b390039e-04d4-492f-b211-091f63ab658b-kube-api-access-ftf2m\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.247025 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b390039e-04d4-492f-b211-091f63ab658b" (UID: "b390039e-04d4-492f-b211-091f63ab658b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.256161 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b390039e-04d4-492f-b211-091f63ab658b" (UID: "b390039e-04d4-492f-b211-091f63ab658b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.260725 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b390039e-04d4-492f-b211-091f63ab658b" (UID: "b390039e-04d4-492f-b211-091f63ab658b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.272737 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-config" (OuterVolumeSpecName: "config") pod "b390039e-04d4-492f-b211-091f63ab658b" (UID: "b390039e-04d4-492f-b211-091f63ab658b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.280056 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b390039e-04d4-492f-b211-091f63ab658b" (UID: "b390039e-04d4-492f-b211-091f63ab658b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.299785 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.299823 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.299835 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.299845 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:35 crc kubenswrapper[4634]: I0929 14:07:35.299853 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b390039e-04d4-492f-b211-091f63ab658b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.012300 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerStarted","Data":"cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5"} Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.012335 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-bcgbg" Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.012502 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-central-agent" containerID="cri-o://84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9" gracePeriod=30 Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.012575 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="sg-core" containerID="cri-o://ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52" gracePeriod=30 Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.012599 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="proxy-httpd" containerID="cri-o://cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5" gracePeriod=30 Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.012658 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-notification-agent" containerID="cri-o://bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043" gracePeriod=30 Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.034826 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.052283 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.8886344560000001 podStartE2EDuration="6.052261318s" podCreationTimestamp="2025-09-29 14:07:30 +0000 UTC" firstStartedPulling="2025-09-29 14:07:31.271878351 +0000 UTC m=+1381.840606100" lastFinishedPulling="2025-09-29 14:07:35.435505213 +0000 UTC m=+1386.004232962" observedRunningTime="2025-09-29 14:07:36.041154247 +0000 UTC m=+1386.609881996" watchObservedRunningTime="2025-09-29 14:07:36.052261318 +0000 UTC m=+1386.620989067" Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.072883 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-bcgbg"] Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.087595 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-bcgbg"] Sep 29 14:07:36 crc kubenswrapper[4634]: I0929 14:07:36.125135 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b390039e-04d4-492f-b211-091f63ab658b" path="/var/lib/kubelet/pods/b390039e-04d4-492f-b211-091f63ab658b/volumes" Sep 29 14:07:36 crc kubenswrapper[4634]: E0929 14:07:36.618321 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67123c57_e7d0_4982_9aa8_ab2e00905535.slice/crio-bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043.scope\": RecentStats: unable to find data in memory cache]" Sep 29 14:07:37 crc kubenswrapper[4634]: I0929 14:07:37.027473 4634 generic.go:334] "Generic (PLEG): container finished" podID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerID="cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5" exitCode=0 Sep 29 14:07:37 crc kubenswrapper[4634]: I0929 
14:07:37.027525 4634 generic.go:334] "Generic (PLEG): container finished" podID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerID="ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52" exitCode=2 Sep 29 14:07:37 crc kubenswrapper[4634]: I0929 14:07:37.027535 4634 generic.go:334] "Generic (PLEG): container finished" podID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerID="bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043" exitCode=0 Sep 29 14:07:37 crc kubenswrapper[4634]: I0929 14:07:37.027561 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerDied","Data":"cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5"} Sep 29 14:07:37 crc kubenswrapper[4634]: I0929 14:07:37.027597 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerDied","Data":"ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52"} Sep 29 14:07:37 crc kubenswrapper[4634]: I0929 14:07:37.027611 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerDied","Data":"bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043"} Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.690159 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813449 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-config-data\") pod \"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813549 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-run-httpd\") pod \"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813677 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-scripts\") pod \"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813776 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-log-httpd\") pod \"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813809 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-sg-core-conf-yaml\") pod \"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813830 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzmzb\" (UniqueName: \"kubernetes.io/projected/67123c57-e7d0-4982-9aa8-ab2e00905535-kube-api-access-kzmzb\") pod 
\"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813904 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-combined-ca-bundle\") pod \"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.813945 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-ceilometer-tls-certs\") pod \"67123c57-e7d0-4982-9aa8-ab2e00905535\" (UID: \"67123c57-e7d0-4982-9aa8-ab2e00905535\") " Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.814634 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.815503 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.822435 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67123c57-e7d0-4982-9aa8-ab2e00905535-kube-api-access-kzmzb" (OuterVolumeSpecName: "kube-api-access-kzmzb") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "kube-api-access-kzmzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.823500 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-scripts" (OuterVolumeSpecName: "scripts") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.854166 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.887706 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.910008 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.917727 4634 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.917765 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.917775 4634 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/67123c57-e7d0-4982-9aa8-ab2e00905535-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.917785 4634 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.917795 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzmzb\" (UniqueName: \"kubernetes.io/projected/67123c57-e7d0-4982-9aa8-ab2e00905535-kube-api-access-kzmzb\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.917806 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.917815 4634 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:39 crc kubenswrapper[4634]: I0929 14:07:39.933133 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-config-data" (OuterVolumeSpecName: "config-data") pod "67123c57-e7d0-4982-9aa8-ab2e00905535" (UID: "67123c57-e7d0-4982-9aa8-ab2e00905535"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.020689 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67123c57-e7d0-4982-9aa8-ab2e00905535-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.072415 4634 generic.go:334] "Generic (PLEG): container finished" podID="e00a014d-19be-46d4-91e0-0b9a34160195" containerID="e51de5759532a1462c272d8c9a3f4b7a790fc30ef4279d18b24024b7218795e8" exitCode=0 Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.072490 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvmgw" event={"ID":"e00a014d-19be-46d4-91e0-0b9a34160195","Type":"ContainerDied","Data":"e51de5759532a1462c272d8c9a3f4b7a790fc30ef4279d18b24024b7218795e8"} Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.077720 4634 generic.go:334] "Generic (PLEG): container finished" podID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerID="84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9" exitCode=0 Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.077760 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerDied","Data":"84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9"} Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.077796 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"67123c57-e7d0-4982-9aa8-ab2e00905535","Type":"ContainerDied","Data":"b32901a2edb3b859c8e26de3c87ec7599542184e8d40f26985a868a8f4429048"} Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.077808 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.077829 4634 scope.go:117] "RemoveContainer" containerID="cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.115844 4634 scope.go:117] "RemoveContainer" containerID="ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.161353 4634 scope.go:117] "RemoveContainer" containerID="bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.161469 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.186595 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.202256 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.202867 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="proxy-httpd" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.202891 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="proxy-httpd" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.202915 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-notification-agent" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.202922 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-notification-agent" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.202936 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b390039e-04d4-492f-b211-091f63ab658b" containerName="init" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.202943 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b390039e-04d4-492f-b211-091f63ab658b" containerName="init" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.202968 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-central-agent" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.202973 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-central-agent" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.202988 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="sg-core" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.202995 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="sg-core" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.203009 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b390039e-04d4-492f-b211-091f63ab658b" containerName="dnsmasq-dns" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.203016 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b390039e-04d4-492f-b211-091f63ab658b" containerName="dnsmasq-dns" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.203235 4634 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-notification-agent" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.203258 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="proxy-httpd" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.203275 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="sg-core" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.203284 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b390039e-04d4-492f-b211-091f63ab658b" containerName="dnsmasq-dns" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.203293 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" containerName="ceilometer-central-agent" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.205810 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.208937 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.210311 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.211145 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.217437 4634 scope.go:117] "RemoveContainer" containerID="84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.219941 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.273421 4634 scope.go:117] "RemoveContainer" containerID="cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.275172 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5\": container with ID starting with cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5 not found: ID does not exist" containerID="cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.275233 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5"} err="failed to get container status \"cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5\": rpc error: code = NotFound desc = could not find container \"cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5\": container with ID starting with cc973e7b1e507d803eff10c13913ceaeb44e4532267a08f78f046bde240572b5 not found: ID does not exist" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.275273 4634 scope.go:117] "RemoveContainer" containerID="ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.275578 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52\": container with ID starting with ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52 not found: ID does not exist" containerID="ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.275602 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52"} err="failed to get container status \"ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52\": rpc error: code = NotFound desc = could not find container \"ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52\": container with ID starting with ef2a9d99162f45cfabfd8041701f47fa26f1bf813cae3e7209ea825363514c52 not found: ID does not exist" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.275617 4634 scope.go:117] "RemoveContainer" containerID="bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.276031 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043\": container with ID starting with bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043 not found: ID does not exist" containerID="bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.276059 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043"} err="failed to get container status \"bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043\": rpc error: code = NotFound desc = could not find container \"bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043\": container with ID starting with bf612bf75f40cba544b9c019a82e7143d6d402221c208b082b8a0a5f51ad2043 not found: ID does not exist" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.276077 4634 scope.go:117] "RemoveContainer" containerID="84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9" Sep 29 14:07:40 crc kubenswrapper[4634]: E0929 14:07:40.276320 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9\": container with ID starting with 84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9 not found: ID does not exist" containerID="84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.276347 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9"} err="failed to get container status \"84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9\": rpc error: code = NotFound desc = could not find container \"84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9\": container with ID starting with 84eb8e32c62bba128a16678899148f28d73fa9aec8e84145315c4da0203df4c9 not found: ID does not exist" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.340512 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.340701 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtj4k\" (UniqueName: \"kubernetes.io/projected/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-kube-api-access-gtj4k\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.340747 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-log-httpd\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.340788 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.340849 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.341055 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-config-data\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.341255 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-run-httpd\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.341635 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-scripts\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.444215 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-scripts\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.444305 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 
14:07:40.444408 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtj4k\" (UniqueName: \"kubernetes.io/projected/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-kube-api-access-gtj4k\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.444459 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-log-httpd\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.444512 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.444592 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.444617 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-config-data\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.444702 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-run-httpd\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.445018 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-log-httpd\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.445499 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-run-httpd\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.449212 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.449990 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.450725 4634 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-config-data\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.460700 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-scripts\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.466329 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.470384 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtj4k\" (UniqueName: \"kubernetes.io/projected/2985415a-1cb2-4f9a-9a10-c615ddb91dbd-kube-api-access-gtj4k\") pod \"ceilometer-0\" (UID: \"2985415a-1cb2-4f9a-9a10-c615ddb91dbd\") " pod="openstack/ceilometer-0" Sep 29 14:07:40 crc kubenswrapper[4634]: I0929 14:07:40.554153 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.133560 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 14:07:41 crc kubenswrapper[4634]: W0929 14:07:41.145336 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2985415a_1cb2_4f9a_9a10_c615ddb91dbd.slice/crio-76de67c02c2a37c7e434126bdecd479a383be5259b1e97c40bb91fe1a676cd8a WatchSource:0}: Error finding container 76de67c02c2a37c7e434126bdecd479a383be5259b1e97c40bb91fe1a676cd8a: Status 404 returned error can't find the container with id 76de67c02c2a37c7e434126bdecd479a383be5259b1e97c40bb91fe1a676cd8a Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.284419 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mhpvz" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server" probeResult="failure" output=< Sep 29 14:07:41 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:07:41 crc kubenswrapper[4634]: > Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.511768 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.677409 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcm7q\" (UniqueName: \"kubernetes.io/projected/e00a014d-19be-46d4-91e0-0b9a34160195-kube-api-access-jcm7q\") pod \"e00a014d-19be-46d4-91e0-0b9a34160195\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.677574 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-config-data\") pod \"e00a014d-19be-46d4-91e0-0b9a34160195\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.677617 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-scripts\") pod \"e00a014d-19be-46d4-91e0-0b9a34160195\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.678169 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-combined-ca-bundle\") pod \"e00a014d-19be-46d4-91e0-0b9a34160195\" (UID: \"e00a014d-19be-46d4-91e0-0b9a34160195\") " Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.685473 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e00a014d-19be-46d4-91e0-0b9a34160195-kube-api-access-jcm7q" (OuterVolumeSpecName: "kube-api-access-jcm7q") pod "e00a014d-19be-46d4-91e0-0b9a34160195" (UID: "e00a014d-19be-46d4-91e0-0b9a34160195"). InnerVolumeSpecName "kube-api-access-jcm7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.685690 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-scripts" (OuterVolumeSpecName: "scripts") pod "e00a014d-19be-46d4-91e0-0b9a34160195" (UID: "e00a014d-19be-46d4-91e0-0b9a34160195"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.724307 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e00a014d-19be-46d4-91e0-0b9a34160195" (UID: "e00a014d-19be-46d4-91e0-0b9a34160195"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.730657 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-config-data" (OuterVolumeSpecName: "config-data") pod "e00a014d-19be-46d4-91e0-0b9a34160195" (UID: "e00a014d-19be-46d4-91e0-0b9a34160195"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.782313 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcm7q\" (UniqueName: \"kubernetes.io/projected/e00a014d-19be-46d4-91e0-0b9a34160195-kube-api-access-jcm7q\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.782794 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.782804 4634 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:41 crc kubenswrapper[4634]: I0929 14:07:41.782813 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e00a014d-19be-46d4-91e0-0b9a34160195-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.107283 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dvmgw" Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.107315 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dvmgw" event={"ID":"e00a014d-19be-46d4-91e0-0b9a34160195","Type":"ContainerDied","Data":"853e40ab18799d0d92ef382d81618a3a9ff1f8d3b7e5eb642526ef0a6b28e60d"} Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.107393 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="853e40ab18799d0d92ef382d81618a3a9ff1f8d3b7e5eb642526ef0a6b28e60d" Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.125597 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67123c57-e7d0-4982-9aa8-ab2e00905535" path="/var/lib/kubelet/pods/67123c57-e7d0-4982-9aa8-ab2e00905535/volumes" Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.126788 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2985415a-1cb2-4f9a-9a10-c615ddb91dbd","Type":"ContainerStarted","Data":"40732d20f2ef4abde36befb1e2f3ee4782b6e590ca3a5e95e9c6a04aeb34477e"} Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.126820 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2985415a-1cb2-4f9a-9a10-c615ddb91dbd","Type":"ContainerStarted","Data":"76de67c02c2a37c7e434126bdecd479a383be5259b1e97c40bb91fe1a676cd8a"} Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.287995 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.288059 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.306448 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.306793 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="09857ee0-d835-4c75-9523-ef30395398c6" containerName="nova-scheduler-scheduler" containerID="cri-o://e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" gracePeriod=30 Sep 29 14:07:42 crc 
kubenswrapper[4634]: I0929 14:07:42.322640 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.347859 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.348411 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-log" containerID="cri-o://e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36" gracePeriod=30 Sep 29 14:07:42 crc kubenswrapper[4634]: I0929 14:07:42.348513 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-metadata" containerID="cri-o://6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b" gracePeriod=30 Sep 29 14:07:42 crc kubenswrapper[4634]: E0929 14:07:42.655205 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 14:07:42 crc kubenswrapper[4634]: E0929 14:07:42.657249 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 14:07:42 crc kubenswrapper[4634]: E0929 14:07:42.660925 4634 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 14:07:42 crc kubenswrapper[4634]: E0929 14:07:42.661038 4634 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="09857ee0-d835-4c75-9523-ef30395398c6" containerName="nova-scheduler-scheduler" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.151328 4634 generic.go:334] "Generic (PLEG): container finished" podID="4027fd6b-2391-4fbc-b503-9e698682b404" containerID="e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36" exitCode=143 Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.151840 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4027fd6b-2391-4fbc-b503-9e698682b404","Type":"ContainerDied","Data":"e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36"} Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.164426 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-log" containerID="cri-o://d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5" gracePeriod=30 Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.164589 4634 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/ceilometer-0" event={"ID":"2985415a-1cb2-4f9a-9a10-c615ddb91dbd","Type":"ContainerStarted","Data":"311a6a92e428fa7251e2ccb43f82e9f4185aea84afd37b96c007121e40901432"} Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.165070 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-api" containerID="cri-o://bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082" gracePeriod=30 Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.181401 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": EOF" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.181401 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": EOF" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.679147 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.866788 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-combined-ca-bundle\") pod \"09857ee0-d835-4c75-9523-ef30395398c6\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.867202 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-config-data\") pod \"09857ee0-d835-4c75-9523-ef30395398c6\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.867495 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7wqr\" (UniqueName: \"kubernetes.io/projected/09857ee0-d835-4c75-9523-ef30395398c6-kube-api-access-v7wqr\") pod \"09857ee0-d835-4c75-9523-ef30395398c6\" (UID: \"09857ee0-d835-4c75-9523-ef30395398c6\") " Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.879640 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09857ee0-d835-4c75-9523-ef30395398c6-kube-api-access-v7wqr" (OuterVolumeSpecName: "kube-api-access-v7wqr") pod "09857ee0-d835-4c75-9523-ef30395398c6" (UID: "09857ee0-d835-4c75-9523-ef30395398c6"). InnerVolumeSpecName "kube-api-access-v7wqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.907457 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-config-data" (OuterVolumeSpecName: "config-data") pod "09857ee0-d835-4c75-9523-ef30395398c6" (UID: "09857ee0-d835-4c75-9523-ef30395398c6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.913966 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09857ee0-d835-4c75-9523-ef30395398c6" (UID: "09857ee0-d835-4c75-9523-ef30395398c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.970680 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.970726 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09857ee0-d835-4c75-9523-ef30395398c6-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:43 crc kubenswrapper[4634]: I0929 14:07:43.970738 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7wqr\" (UniqueName: \"kubernetes.io/projected/09857ee0-d835-4c75-9523-ef30395398c6-kube-api-access-v7wqr\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.179252 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2985415a-1cb2-4f9a-9a10-c615ddb91dbd","Type":"ContainerStarted","Data":"8d61b169636e77087bf0431299942e164766e270bcd1de3082402831001b3ad7"} Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.182018 4634 generic.go:334] "Generic (PLEG): container finished" podID="c792bb76-dbea-464c-916e-335db43f086f" containerID="d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5" exitCode=143 Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.182112 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c792bb76-dbea-464c-916e-335db43f086f","Type":"ContainerDied","Data":"d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5"} Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.184318 4634 generic.go:334] "Generic (PLEG): container finished" podID="09857ee0-d835-4c75-9523-ef30395398c6" containerID="e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" exitCode=0 Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.184398 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.184534 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"09857ee0-d835-4c75-9523-ef30395398c6","Type":"ContainerDied","Data":"e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3"} Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.184682 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"09857ee0-d835-4c75-9523-ef30395398c6","Type":"ContainerDied","Data":"0eb8d49874e6751ae9b80a1405fccc08c169692d5bb77426a2b2049d335f67ee"} Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.184756 4634 scope.go:117] "RemoveContainer" containerID="e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.218928 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.225312 4634 scope.go:117] "RemoveContainer" containerID="e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.226037 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:07:44 crc kubenswrapper[4634]: E0929 14:07:44.226560 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3\": container with ID starting with e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3 not found: ID does not exist" containerID="e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.226631 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3"} err="failed to get container status \"e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3\": rpc error: code = NotFound desc = could not find container \"e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3\": container with ID starting with e118587a6d8c5655e61d800f9740b805a93cb32019be804c120f16a02bfc9db3 not found: ID does not exist" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.249097 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:07:44 crc kubenswrapper[4634]: E0929 14:07:44.249600 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e00a014d-19be-46d4-91e0-0b9a34160195" containerName="nova-manage" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.249622 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="e00a014d-19be-46d4-91e0-0b9a34160195" containerName="nova-manage" Sep 29 14:07:44 crc kubenswrapper[4634]: E0929 14:07:44.249650 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09857ee0-d835-4c75-9523-ef30395398c6" containerName="nova-scheduler-scheduler" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.249657 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="09857ee0-d835-4c75-9523-ef30395398c6" containerName="nova-scheduler-scheduler" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.249885 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="e00a014d-19be-46d4-91e0-0b9a34160195" containerName="nova-manage" Sep 29 14:07:44 crc 
kubenswrapper[4634]: I0929 14:07:44.249931 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="09857ee0-d835-4c75-9523-ef30395398c6" containerName="nova-scheduler-scheduler" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.250687 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.258606 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.266686 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.381301 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6w42\" (UniqueName: \"kubernetes.io/projected/8fca2b6d-9b01-4498-bdc1-619d2b52d173-kube-api-access-p6w42\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.381782 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fca2b6d-9b01-4498-bdc1-619d2b52d173-config-data\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.381816 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fca2b6d-9b01-4498-bdc1-619d2b52d173-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.484715 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fca2b6d-9b01-4498-bdc1-619d2b52d173-config-data\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.484778 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fca2b6d-9b01-4498-bdc1-619d2b52d173-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.484872 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6w42\" (UniqueName: \"kubernetes.io/projected/8fca2b6d-9b01-4498-bdc1-619d2b52d173-kube-api-access-p6w42\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.501694 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fca2b6d-9b01-4498-bdc1-619d2b52d173-config-data\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.501710 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8fca2b6d-9b01-4498-bdc1-619d2b52d173-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.510723 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6w42\" (UniqueName: \"kubernetes.io/projected/8fca2b6d-9b01-4498-bdc1-619d2b52d173-kube-api-access-p6w42\") pod \"nova-scheduler-0\" (UID: \"8fca2b6d-9b01-4498-bdc1-619d2b52d173\") " pod="openstack/nova-scheduler-0" Sep 29 14:07:44 crc kubenswrapper[4634]: I0929 14:07:44.573763 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 14:07:45 crc kubenswrapper[4634]: I0929 14:07:45.086481 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 14:07:45 crc kubenswrapper[4634]: I0929 14:07:45.274235 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8fca2b6d-9b01-4498-bdc1-619d2b52d173","Type":"ContainerStarted","Data":"69717faf3f94106ac52c46f9c4182bf5049f254f2251e67db3fca9448fca7f7a"} Sep 29 14:07:45 crc kubenswrapper[4634]: I0929 14:07:45.513906 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:37828->10.217.0.196:8775: read: connection reset by peer" Sep 29 14:07:45 crc kubenswrapper[4634]: I0929 14:07:45.514264 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.196:8775/\": read tcp 10.217.0.2:37832->10.217.0.196:8775: read: connection reset by peer" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.072470 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.138211 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09857ee0-d835-4c75-9523-ef30395398c6" path="/var/lib/kubelet/pods/09857ee0-d835-4c75-9523-ef30395398c6/volumes" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.152744 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4027fd6b-2391-4fbc-b503-9e698682b404-logs\") pod \"4027fd6b-2391-4fbc-b503-9e698682b404\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.152798 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nm87b\" (UniqueName: \"kubernetes.io/projected/4027fd6b-2391-4fbc-b503-9e698682b404-kube-api-access-nm87b\") pod \"4027fd6b-2391-4fbc-b503-9e698682b404\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.153075 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-nova-metadata-tls-certs\") pod \"4027fd6b-2391-4fbc-b503-9e698682b404\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.153127 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-combined-ca-bundle\") pod \"4027fd6b-2391-4fbc-b503-9e698682b404\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.153189 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-config-data\") pod \"4027fd6b-2391-4fbc-b503-9e698682b404\" (UID: \"4027fd6b-2391-4fbc-b503-9e698682b404\") " Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.153420 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4027fd6b-2391-4fbc-b503-9e698682b404-logs" (OuterVolumeSpecName: "logs") pod "4027fd6b-2391-4fbc-b503-9e698682b404" (UID: "4027fd6b-2391-4fbc-b503-9e698682b404"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.153712 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4027fd6b-2391-4fbc-b503-9e698682b404-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.187382 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4027fd6b-2391-4fbc-b503-9e698682b404-kube-api-access-nm87b" (OuterVolumeSpecName: "kube-api-access-nm87b") pod "4027fd6b-2391-4fbc-b503-9e698682b404" (UID: "4027fd6b-2391-4fbc-b503-9e698682b404"). InnerVolumeSpecName "kube-api-access-nm87b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.222895 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-config-data" (OuterVolumeSpecName: "config-data") pod "4027fd6b-2391-4fbc-b503-9e698682b404" (UID: "4027fd6b-2391-4fbc-b503-9e698682b404"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.256969 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.257005 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nm87b\" (UniqueName: \"kubernetes.io/projected/4027fd6b-2391-4fbc-b503-9e698682b404-kube-api-access-nm87b\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.262244 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4027fd6b-2391-4fbc-b503-9e698682b404" (UID: "4027fd6b-2391-4fbc-b503-9e698682b404"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.288716 4634 generic.go:334] "Generic (PLEG): container finished" podID="4027fd6b-2391-4fbc-b503-9e698682b404" containerID="6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b" exitCode=0 Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.288793 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4027fd6b-2391-4fbc-b503-9e698682b404","Type":"ContainerDied","Data":"6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b"} Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.288837 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4027fd6b-2391-4fbc-b503-9e698682b404","Type":"ContainerDied","Data":"d5e86208bb9f34bfa624d4f19fc1e12bc5efbf3cd437d822997e3249ed299b8d"} Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.288860 4634 scope.go:117] "RemoveContainer" containerID="6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.289002 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.295722 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4027fd6b-2391-4fbc-b503-9e698682b404" (UID: "4027fd6b-2391-4fbc-b503-9e698682b404"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.299486 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2985415a-1cb2-4f9a-9a10-c615ddb91dbd","Type":"ContainerStarted","Data":"1ed33dbecb45ac16c6ec41a27a1592af9a65473e8d5badbdee659ce72fc8580d"} Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.299922 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.301893 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8fca2b6d-9b01-4498-bdc1-619d2b52d173","Type":"ContainerStarted","Data":"d19d863d45d3c129d71f2f73654df74d4329aa4866881fe11ff0bbbe28e8caff"} Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.344918 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.481978104 podStartE2EDuration="6.344890463s" podCreationTimestamp="2025-09-29 14:07:40 +0000 UTC" firstStartedPulling="2025-09-29 14:07:41.150441594 +0000 UTC m=+1391.719169343" lastFinishedPulling="2025-09-29 14:07:45.013353953 +0000 UTC m=+1395.582081702" observedRunningTime="2025-09-29 14:07:46.331938593 +0000 UTC m=+1396.900666342" watchObservedRunningTime="2025-09-29 14:07:46.344890463 +0000 UTC m=+1396.913618212" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.362197 4634 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.362236 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4027fd6b-2391-4fbc-b503-9e698682b404-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.363356 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.363326406 podStartE2EDuration="2.363326406s" podCreationTimestamp="2025-09-29 14:07:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:46.350673474 +0000 UTC m=+1396.919401223" watchObservedRunningTime="2025-09-29 14:07:46.363326406 +0000 UTC m=+1396.932054155" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.402100 4634 scope.go:117] "RemoveContainer" containerID="e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.437328 4634 scope.go:117] "RemoveContainer" containerID="6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b" Sep 29 14:07:46 crc kubenswrapper[4634]: E0929 14:07:46.437970 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b\": container with ID starting with 6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b not found: ID does not exist" containerID="6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.438018 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b"} err="failed to get container status \"6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b\": rpc error: code = NotFound desc = could not find container \"6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b\": container with ID starting with 6db7774d8ac3ac5ad222a3c7bbe869ea3dc61e44b197f71e209bdc06dfe79c4b not found: ID does not exist" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.438061 4634 scope.go:117] "RemoveContainer" containerID="e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36" Sep 29 14:07:46 crc kubenswrapper[4634]: E0929 14:07:46.438328 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36\": container with ID starting with e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36 not found: ID does not exist" containerID="e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.438360 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36"} err="failed to get container status \"e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36\": rpc error: code = NotFound desc = could not find container \"e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36\": container with ID starting with e384a40e8d262dc2a8bc43a40863845d58434e94b10fe832a2551015ccc44c36 not found: ID does not exist" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.631265 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.648305 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.669715 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:07:46 crc kubenswrapper[4634]: E0929 14:07:46.670844 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-log" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.670913 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-log" Sep 29 14:07:46 crc kubenswrapper[4634]: E0929 14:07:46.670981 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-metadata" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.671044 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-metadata" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.671338 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-log" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.671421 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" containerName="nova-metadata-metadata" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.672803 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.680245 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.680337 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.696842 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.770667 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.770771 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-config-data\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.770803 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff9c4\" (UniqueName: \"kubernetes.io/projected/6b03940d-5de0-4326-b16f-c436f6637a92-kube-api-access-ff9c4\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.770828 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.771332 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b03940d-5de0-4326-b16f-c436f6637a92-logs\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.876148 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.876301 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-config-data\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.876342 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff9c4\" (UniqueName: \"kubernetes.io/projected/6b03940d-5de0-4326-b16f-c436f6637a92-kube-api-access-ff9c4\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " 
pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.876370 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.876462 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b03940d-5de0-4326-b16f-c436f6637a92-logs\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.876969 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b03940d-5de0-4326-b16f-c436f6637a92-logs\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.880829 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.882852 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-config-data\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.888727 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b03940d-5de0-4326-b16f-c436f6637a92-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:46 crc kubenswrapper[4634]: I0929 14:07:46.896064 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff9c4\" (UniqueName: \"kubernetes.io/projected/6b03940d-5de0-4326-b16f-c436f6637a92-kube-api-access-ff9c4\") pod \"nova-metadata-0\" (UID: \"6b03940d-5de0-4326-b16f-c436f6637a92\") " pod="openstack/nova-metadata-0" Sep 29 14:07:47 crc kubenswrapper[4634]: I0929 14:07:47.032207 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 14:07:47 crc kubenswrapper[4634]: I0929 14:07:47.543217 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 14:07:48 crc kubenswrapper[4634]: I0929 14:07:48.168115 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4027fd6b-2391-4fbc-b503-9e698682b404" path="/var/lib/kubelet/pods/4027fd6b-2391-4fbc-b503-9e698682b404/volumes" Sep 29 14:07:48 crc kubenswrapper[4634]: I0929 14:07:48.339794 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b03940d-5de0-4326-b16f-c436f6637a92","Type":"ContainerStarted","Data":"ed241d7591bef03c60c29bbde76eb65ff0ccd27b5a2f4cd7df135031f0946c73"} Sep 29 14:07:48 crc kubenswrapper[4634]: I0929 14:07:48.339864 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b03940d-5de0-4326-b16f-c436f6637a92","Type":"ContainerStarted","Data":"5e8f79bf79abec144008269cbbba4883de7894a9819d4e8f17117d3393695202"} Sep 29 14:07:48 crc kubenswrapper[4634]: I0929 14:07:48.339880 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6b03940d-5de0-4326-b16f-c436f6637a92","Type":"ContainerStarted","Data":"eee7f86a347cac365314213effd7a9506c60530ea0951fb8bc7a0ae845860beb"} Sep 29 14:07:48 crc kubenswrapper[4634]: I0929 14:07:48.372670 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.372646661 podStartE2EDuration="2.372646661s" podCreationTimestamp="2025-09-29 14:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:48.371437699 +0000 UTC m=+1398.940165448" watchObservedRunningTime="2025-09-29 14:07:48.372646661 +0000 UTC m=+1398.941374410" Sep 29 14:07:49 crc kubenswrapper[4634]: I0929 14:07:49.575307 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.245815 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mhpvz" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server" probeResult="failure" output=< Sep 29 14:07:51 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:07:51 crc kubenswrapper[4634]: > Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.349913 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.390952 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l9m9\" (UniqueName: \"kubernetes.io/projected/c792bb76-dbea-464c-916e-335db43f086f-kube-api-access-8l9m9\") pod \"c792bb76-dbea-464c-916e-335db43f086f\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.391057 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-public-tls-certs\") pod \"c792bb76-dbea-464c-916e-335db43f086f\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.391266 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-combined-ca-bundle\") pod \"c792bb76-dbea-464c-916e-335db43f086f\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.391369 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c792bb76-dbea-464c-916e-335db43f086f-logs\") pod \"c792bb76-dbea-464c-916e-335db43f086f\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.391412 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-config-data\") pod \"c792bb76-dbea-464c-916e-335db43f086f\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.391529 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-internal-tls-certs\") pod \"c792bb76-dbea-464c-916e-335db43f086f\" (UID: \"c792bb76-dbea-464c-916e-335db43f086f\") " Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.392413 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c792bb76-dbea-464c-916e-335db43f086f-logs" (OuterVolumeSpecName: "logs") pod "c792bb76-dbea-464c-916e-335db43f086f" (UID: "c792bb76-dbea-464c-916e-335db43f086f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.402798 4634 generic.go:334] "Generic (PLEG): container finished" podID="c792bb76-dbea-464c-916e-335db43f086f" containerID="bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082" exitCode=0 Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.402866 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c792bb76-dbea-464c-916e-335db43f086f","Type":"ContainerDied","Data":"bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082"} Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.402909 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c792bb76-dbea-464c-916e-335db43f086f","Type":"ContainerDied","Data":"0d95e4a959c0f98984fc268d457226e4566f94507a6225b34c99bb019c39bdf3"} Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.402932 4634 scope.go:117] "RemoveContainer" containerID="bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.403183 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.421916 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c792bb76-dbea-464c-916e-335db43f086f-kube-api-access-8l9m9" (OuterVolumeSpecName: "kube-api-access-8l9m9") pod "c792bb76-dbea-464c-916e-335db43f086f" (UID: "c792bb76-dbea-464c-916e-335db43f086f"). InnerVolumeSpecName "kube-api-access-8l9m9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.446711 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c792bb76-dbea-464c-916e-335db43f086f" (UID: "c792bb76-dbea-464c-916e-335db43f086f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.461770 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-config-data" (OuterVolumeSpecName: "config-data") pod "c792bb76-dbea-464c-916e-335db43f086f" (UID: "c792bb76-dbea-464c-916e-335db43f086f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.488761 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c792bb76-dbea-464c-916e-335db43f086f" (UID: "c792bb76-dbea-464c-916e-335db43f086f"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.493785 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.493819 4634 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c792bb76-dbea-464c-916e-335db43f086f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.493829 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.493839 4634 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.493848 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l9m9\" (UniqueName: \"kubernetes.io/projected/c792bb76-dbea-464c-916e-335db43f086f-kube-api-access-8l9m9\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.517450 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c792bb76-dbea-464c-916e-335db43f086f" (UID: "c792bb76-dbea-464c-916e-335db43f086f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.524851 4634 scope.go:117] "RemoveContainer" containerID="d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.547795 4634 scope.go:117] "RemoveContainer" containerID="bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082" Sep 29 14:07:51 crc kubenswrapper[4634]: E0929 14:07:51.548591 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082\": container with ID starting with bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082 not found: ID does not exist" containerID="bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.548623 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082"} err="failed to get container status \"bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082\": rpc error: code = NotFound desc = could not find container \"bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082\": container with ID starting with bda27bea5cc50a37446a8f67151f7947be13c81f812f0d2c2d9f899889ec4082 not found: ID does not exist" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.548649 4634 scope.go:117] "RemoveContainer" containerID="d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5" Sep 29 14:07:51 crc kubenswrapper[4634]: E0929 14:07:51.549128 4634 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5\": container with ID starting with d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5 not found: ID does not exist" containerID="d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.549180 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5"} err="failed to get container status \"d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5\": rpc error: code = NotFound desc = could not find container \"d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5\": container with ID starting with d075da9255c5063755fd206ffc515f957affff79df7784ef74d1754b832424a5 not found: ID does not exist" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.596943 4634 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c792bb76-dbea-464c-916e-335db43f086f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.770333 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.781237 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.808199 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:51 crc kubenswrapper[4634]: E0929 14:07:51.808793 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-api" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.808839 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-api" Sep 29 14:07:51 crc kubenswrapper[4634]: E0929 14:07:51.808877 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-log" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.808887 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-log" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.809197 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-api" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.809233 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="c792bb76-dbea-464c-916e-335db43f086f" containerName="nova-api-log" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.810449 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.817504 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.817732 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.819019 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.820431 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.916817 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.916994 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-config-data\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.917123 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.917353 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hrw7\" (UniqueName: \"kubernetes.io/projected/3eef7d23-1a73-41a3-b80a-d0be5789f09d-kube-api-access-6hrw7\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.917564 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-public-tls-certs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:51 crc kubenswrapper[4634]: I0929 14:07:51.917749 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3eef7d23-1a73-41a3-b80a-d0be5789f09d-logs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.020887 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-public-tls-certs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.021011 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3eef7d23-1a73-41a3-b80a-d0be5789f09d-logs\") pod \"nova-api-0\" (UID: 
\"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.021167 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.021846 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-config-data\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.021909 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.021978 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hrw7\" (UniqueName: \"kubernetes.io/projected/3eef7d23-1a73-41a3-b80a-d0be5789f09d-kube-api-access-6hrw7\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.021987 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3eef7d23-1a73-41a3-b80a-d0be5789f09d-logs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.025958 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.028042 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-public-tls-certs\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.029733 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-config-data\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.030571 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eef7d23-1a73-41a3-b80a-d0be5789f09d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.033357 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.033830 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 14:07:52 crc 
kubenswrapper[4634]: I0929 14:07:52.046129 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hrw7\" (UniqueName: \"kubernetes.io/projected/3eef7d23-1a73-41a3-b80a-d0be5789f09d-kube-api-access-6hrw7\") pod \"nova-api-0\" (UID: \"3eef7d23-1a73-41a3-b80a-d0be5789f09d\") " pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.130409 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c792bb76-dbea-464c-916e-335db43f086f" path="/var/lib/kubelet/pods/c792bb76-dbea-464c-916e-335db43f086f/volumes" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.146576 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 14:07:52 crc kubenswrapper[4634]: I0929 14:07:52.602213 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 14:07:52 crc kubenswrapper[4634]: W0929 14:07:52.605341 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3eef7d23_1a73_41a3_b80a_d0be5789f09d.slice/crio-005c4c86bb3bd65ec49cca8df36f516c072ba9ee0e7656019f264dbd3299fb58 WatchSource:0}: Error finding container 005c4c86bb3bd65ec49cca8df36f516c072ba9ee0e7656019f264dbd3299fb58: Status 404 returned error can't find the container with id 005c4c86bb3bd65ec49cca8df36f516c072ba9ee0e7656019f264dbd3299fb58 Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.436423 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3eef7d23-1a73-41a3-b80a-d0be5789f09d","Type":"ContainerStarted","Data":"7b7833e7b631a2595c90affa51514c214697995a8fe30c018fc485caf989c059"} Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.436790 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3eef7d23-1a73-41a3-b80a-d0be5789f09d","Type":"ContainerStarted","Data":"c23a26b8dddd0e665b8981ed90d91aca0aa84a37ad5ca90a13ec14c02aa73928"} Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.436814 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3eef7d23-1a73-41a3-b80a-d0be5789f09d","Type":"ContainerStarted","Data":"005c4c86bb3bd65ec49cca8df36f516c072ba9ee0e7656019f264dbd3299fb58"} Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.467528 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.46748935 podStartE2EDuration="2.46748935s" podCreationTimestamp="2025-09-29 14:07:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:07:53.458065452 +0000 UTC m=+1404.026793201" watchObservedRunningTime="2025-09-29 14:07:53.46748935 +0000 UTC m=+1404.036217139" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.703503 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k4q8r"] Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.707336 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.730175 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4q8r"] Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.761667 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-utilities\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.762082 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-catalog-content\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.762341 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59rhz\" (UniqueName: \"kubernetes.io/projected/8d374a7a-9cdc-4308-a100-96b1efe56b6b-kube-api-access-59rhz\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.863773 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-utilities\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.863843 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-catalog-content\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.863938 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59rhz\" (UniqueName: \"kubernetes.io/projected/8d374a7a-9cdc-4308-a100-96b1efe56b6b-kube-api-access-59rhz\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.864772 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-utilities\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.864848 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-catalog-content\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:53 crc kubenswrapper[4634]: I0929 14:07:53.885340 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-59rhz\" (UniqueName: \"kubernetes.io/projected/8d374a7a-9cdc-4308-a100-96b1efe56b6b-kube-api-access-59rhz\") pod \"community-operators-k4q8r\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:54 crc kubenswrapper[4634]: I0929 14:07:54.028306 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:07:54 crc kubenswrapper[4634]: I0929 14:07:54.567134 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k4q8r"] Sep 29 14:07:54 crc kubenswrapper[4634]: I0929 14:07:54.574902 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 14:07:54 crc kubenswrapper[4634]: I0929 14:07:54.624734 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 14:07:55 crc kubenswrapper[4634]: I0929 14:07:55.461994 4634 generic.go:334] "Generic (PLEG): container finished" podID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerID="aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4" exitCode=0 Sep 29 14:07:55 crc kubenswrapper[4634]: I0929 14:07:55.462065 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4q8r" event={"ID":"8d374a7a-9cdc-4308-a100-96b1efe56b6b","Type":"ContainerDied","Data":"aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4"} Sep 29 14:07:55 crc kubenswrapper[4634]: I0929 14:07:55.462427 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4q8r" event={"ID":"8d374a7a-9cdc-4308-a100-96b1efe56b6b","Type":"ContainerStarted","Data":"62e17435bd54e208510399be67882d725787f71bdbee527558d5978af518a86b"} Sep 29 14:07:55 crc kubenswrapper[4634]: I0929 14:07:55.509024 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 14:07:57 crc kubenswrapper[4634]: I0929 14:07:57.032467 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 14:07:57 crc kubenswrapper[4634]: I0929 14:07:57.033699 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 14:07:57 crc kubenswrapper[4634]: I0929 14:07:57.490422 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4q8r" event={"ID":"8d374a7a-9cdc-4308-a100-96b1efe56b6b","Type":"ContainerStarted","Data":"9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785"} Sep 29 14:07:58 crc kubenswrapper[4634]: I0929 14:07:58.051402 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6b03940d-5de0-4326-b16f-c436f6637a92" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:07:58 crc kubenswrapper[4634]: I0929 14:07:58.051411 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="6b03940d-5de0-4326-b16f-c436f6637a92" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:07:58 crc kubenswrapper[4634]: I0929 
14:07:58.512504 4634 generic.go:334] "Generic (PLEG): container finished" podID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerID="9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785" exitCode=0 Sep 29 14:07:58 crc kubenswrapper[4634]: I0929 14:07:58.512568 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4q8r" event={"ID":"8d374a7a-9cdc-4308-a100-96b1efe56b6b","Type":"ContainerDied","Data":"9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785"} Sep 29 14:07:59 crc kubenswrapper[4634]: I0929 14:07:59.552909 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4q8r" event={"ID":"8d374a7a-9cdc-4308-a100-96b1efe56b6b","Type":"ContainerStarted","Data":"9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50"} Sep 29 14:07:59 crc kubenswrapper[4634]: I0929 14:07:59.585758 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k4q8r" podStartSLOduration=2.811289833 podStartE2EDuration="6.585732902s" podCreationTimestamp="2025-09-29 14:07:53 +0000 UTC" firstStartedPulling="2025-09-29 14:07:55.465023046 +0000 UTC m=+1406.033750795" lastFinishedPulling="2025-09-29 14:07:59.239466075 +0000 UTC m=+1409.808193864" observedRunningTime="2025-09-29 14:07:59.582722973 +0000 UTC m=+1410.151450732" watchObservedRunningTime="2025-09-29 14:07:59.585732902 +0000 UTC m=+1410.154460651" Sep 29 14:08:00 crc kubenswrapper[4634]: I0929 14:08:00.242182 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:08:00 crc kubenswrapper[4634]: I0929 14:08:00.307684 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:08:01 crc kubenswrapper[4634]: I0929 14:08:01.065941 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mhpvz"] Sep 29 14:08:01 crc kubenswrapper[4634]: I0929 14:08:01.576636 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mhpvz" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server" containerID="cri-o://e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494" gracePeriod=2 Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.137110 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.151363 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.151413 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.293454 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbnp8\" (UniqueName: \"kubernetes.io/projected/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-kube-api-access-nbnp8\") pod \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.293914 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-catalog-content\") pod \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.294043 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-utilities\") pod \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\" (UID: \"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd\") " Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.295069 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-utilities" (OuterVolumeSpecName: "utilities") pod "bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" (UID: "bc3f0c2b-5ce6-4b7a-9094-9577887f53cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.295590 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.300822 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-kube-api-access-nbnp8" (OuterVolumeSpecName: "kube-api-access-nbnp8") pod "bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" (UID: "bc3f0c2b-5ce6-4b7a-9094-9577887f53cd"). InnerVolumeSpecName "kube-api-access-nbnp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.373727 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" (UID: "bc3f0c2b-5ce6-4b7a-9094-9577887f53cd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.398043 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbnp8\" (UniqueName: \"kubernetes.io/projected/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-kube-api-access-nbnp8\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.398095 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.589206 4634 generic.go:334] "Generic (PLEG): container finished" podID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerID="e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494" exitCode=0 Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.589300 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhpvz" event={"ID":"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd","Type":"ContainerDied","Data":"e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494"} Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.589338 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mhpvz" event={"ID":"bc3f0c2b-5ce6-4b7a-9094-9577887f53cd","Type":"ContainerDied","Data":"9c7122489e4ef78c12c2005fa0a8430739e0ff73943de51fcc0319c053038540"} Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.589359 4634 scope.go:117] "RemoveContainer" containerID="e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.589573 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mhpvz" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.659838 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mhpvz"] Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.667000 4634 scope.go:117] "RemoveContainer" containerID="502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.678050 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mhpvz"] Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.704861 4634 scope.go:117] "RemoveContainer" containerID="c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.750658 4634 scope.go:117] "RemoveContainer" containerID="e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494" Sep 29 14:08:02 crc kubenswrapper[4634]: E0929 14:08:02.751825 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494\": container with ID starting with e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494 not found: ID does not exist" containerID="e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.751885 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494"} err="failed to get container status \"e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494\": rpc error: code = NotFound desc = could not find container \"e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494\": container with ID starting with e7e2aa2fcab8a8c0349df642fe1496b8a163e2aaba9866c39088d5006d1eb494 not found: ID does not exist" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.751921 4634 scope.go:117] "RemoveContainer" containerID="502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911" Sep 29 14:08:02 crc kubenswrapper[4634]: E0929 14:08:02.757669 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911\": container with ID starting with 502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911 not found: ID does not exist" containerID="502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.757721 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911"} err="failed to get container status \"502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911\": rpc error: code = NotFound desc = could not find container \"502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911\": container with ID starting with 502cc60e4aec3e42c7683719875953456f4ef8cf2049bb9ee6bb8fbe3ff3a911 not found: ID does not exist" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.757752 4634 scope.go:117] "RemoveContainer" containerID="c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3" Sep 29 14:08:02 crc kubenswrapper[4634]: E0929 14:08:02.758250 4634 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3\": container with ID starting with c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3 not found: ID does not exist" containerID="c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3" Sep 29 14:08:02 crc kubenswrapper[4634]: I0929 14:08:02.758320 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3"} err="failed to get container status \"c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3\": rpc error: code = NotFound desc = could not find container \"c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3\": container with ID starting with c7026b5dd1bdf4076126d546001bb01e1726a683098d118bdfb6b0ce5390bda3 not found: ID does not exist" Sep 29 14:08:03 crc kubenswrapper[4634]: I0929 14:08:03.167351 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3eef7d23-1a73-41a3-b80a-d0be5789f09d" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.210:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:08:03 crc kubenswrapper[4634]: I0929 14:08:03.167614 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3eef7d23-1a73-41a3-b80a-d0be5789f09d" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.210:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 14:08:04 crc kubenswrapper[4634]: I0929 14:08:04.029346 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:08:04 crc kubenswrapper[4634]: I0929 14:08:04.030055 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:08:04 crc kubenswrapper[4634]: I0929 14:08:04.124701 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" path="/var/lib/kubelet/pods/bc3f0c2b-5ce6-4b7a-9094-9577887f53cd/volumes" Sep 29 14:08:05 crc kubenswrapper[4634]: I0929 14:08:05.091985 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-k4q8r" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="registry-server" probeResult="failure" output=< Sep 29 14:08:05 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:08:05 crc kubenswrapper[4634]: > Sep 29 14:08:07 crc kubenswrapper[4634]: I0929 14:08:07.047144 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 14:08:07 crc kubenswrapper[4634]: I0929 14:08:07.047665 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 14:08:07 crc kubenswrapper[4634]: I0929 14:08:07.058986 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 14:08:07 crc kubenswrapper[4634]: I0929 14:08:07.060573 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 14:08:10 crc kubenswrapper[4634]: I0929 14:08:10.580911 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 
29 14:08:12 crc kubenswrapper[4634]: I0929 14:08:12.168278 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 14:08:12 crc kubenswrapper[4634]: I0929 14:08:12.169172 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 14:08:12 crc kubenswrapper[4634]: I0929 14:08:12.170367 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 14:08:12 crc kubenswrapper[4634]: I0929 14:08:12.170448 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 14:08:12 crc kubenswrapper[4634]: I0929 14:08:12.181658 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 14:08:12 crc kubenswrapper[4634]: I0929 14:08:12.184698 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 14:08:14 crc kubenswrapper[4634]: I0929 14:08:14.103355 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:08:14 crc kubenswrapper[4634]: I0929 14:08:14.163508 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:08:14 crc kubenswrapper[4634]: I0929 14:08:14.355303 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k4q8r"] Sep 29 14:08:15 crc kubenswrapper[4634]: I0929 14:08:15.763704 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k4q8r" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="registry-server" containerID="cri-o://9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50" gracePeriod=2 Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.255373 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.260618 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-utilities\") pod \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.260849 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-catalog-content\") pod \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.260953 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59rhz\" (UniqueName: \"kubernetes.io/projected/8d374a7a-9cdc-4308-a100-96b1efe56b6b-kube-api-access-59rhz\") pod \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\" (UID: \"8d374a7a-9cdc-4308-a100-96b1efe56b6b\") " Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.261735 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-utilities" (OuterVolumeSpecName: "utilities") pod "8d374a7a-9cdc-4308-a100-96b1efe56b6b" (UID: "8d374a7a-9cdc-4308-a100-96b1efe56b6b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.269894 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d374a7a-9cdc-4308-a100-96b1efe56b6b-kube-api-access-59rhz" (OuterVolumeSpecName: "kube-api-access-59rhz") pod "8d374a7a-9cdc-4308-a100-96b1efe56b6b" (UID: "8d374a7a-9cdc-4308-a100-96b1efe56b6b"). InnerVolumeSpecName "kube-api-access-59rhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.337335 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d374a7a-9cdc-4308-a100-96b1efe56b6b" (UID: "8d374a7a-9cdc-4308-a100-96b1efe56b6b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.362937 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.362971 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d374a7a-9cdc-4308-a100-96b1efe56b6b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.362981 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59rhz\" (UniqueName: \"kubernetes.io/projected/8d374a7a-9cdc-4308-a100-96b1efe56b6b-kube-api-access-59rhz\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.778312 4634 generic.go:334] "Generic (PLEG): container finished" podID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerID="9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50" exitCode=0 Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.778371 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4q8r" event={"ID":"8d374a7a-9cdc-4308-a100-96b1efe56b6b","Type":"ContainerDied","Data":"9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50"} Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.778412 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k4q8r" event={"ID":"8d374a7a-9cdc-4308-a100-96b1efe56b6b","Type":"ContainerDied","Data":"62e17435bd54e208510399be67882d725787f71bdbee527558d5978af518a86b"} Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.778434 4634 scope.go:117] "RemoveContainer" containerID="9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.778449 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k4q8r" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.816691 4634 scope.go:117] "RemoveContainer" containerID="9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.859060 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k4q8r"] Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.871596 4634 scope.go:117] "RemoveContainer" containerID="aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.872055 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k4q8r"] Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.914556 4634 scope.go:117] "RemoveContainer" containerID="9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50" Sep 29 14:08:16 crc kubenswrapper[4634]: E0929 14:08:16.915329 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50\": container with ID starting with 9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50 not found: ID does not exist" containerID="9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.915378 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50"} err="failed to get container status \"9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50\": rpc error: code = NotFound desc = could not find container \"9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50\": container with ID starting with 9ddcf33dd401e66a5c24594ba68d018d75cfd4ba5a3bd73bc3535703402bdf50 not found: ID does not exist" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.915416 4634 scope.go:117] "RemoveContainer" containerID="9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785" Sep 29 14:08:16 crc kubenswrapper[4634]: E0929 14:08:16.915900 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785\": container with ID starting with 9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785 not found: ID does not exist" containerID="9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.915982 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785"} err="failed to get container status \"9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785\": rpc error: code = NotFound desc = could not find container \"9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785\": container with ID starting with 9a53771b3d8199d043635bfa4886e500644cfaff3ef80f27666b55241aefa785 not found: ID does not exist" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.916023 4634 scope.go:117] "RemoveContainer" containerID="aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4" Sep 29 14:08:16 crc kubenswrapper[4634]: E0929 14:08:16.916939 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4\": container with ID starting with aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4 not found: ID does not exist" containerID="aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4" Sep 29 14:08:16 crc kubenswrapper[4634]: I0929 14:08:16.916981 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4"} err="failed to get container status \"aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4\": rpc error: code = NotFound desc = could not find container \"aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4\": container with ID starting with aa582d24e12daf68a03fa386d0ae1689045f95726fae8d31ae1b292f7d010ca4 not found: ID does not exist" Sep 29 14:08:18 crc kubenswrapper[4634]: I0929 14:08:18.124717 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" path="/var/lib/kubelet/pods/8d374a7a-9cdc-4308-a100-96b1efe56b6b/volumes" Sep 29 14:08:20 crc kubenswrapper[4634]: I0929 14:08:20.705442 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 14:08:21 crc kubenswrapper[4634]: I0929 14:08:21.614178 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 14:08:26 crc kubenswrapper[4634]: I0929 14:08:26.007257 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="rabbitmq" containerID="cri-o://6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0" gracePeriod=604795 Sep 29 14:08:27 crc kubenswrapper[4634]: I0929 14:08:27.438961 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerName="rabbitmq" containerID="cri-o://6afea5db50bc1e064128175751bff751b672b7f268ac3699275fe23fdbe35322" gracePeriod=604795 Sep 29 14:08:30 crc kubenswrapper[4634]: I0929 14:08:30.703568 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Sep 29 14:08:31 crc kubenswrapper[4634]: I0929 14:08:31.463582 4634 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.637995 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.681839 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-config-data\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.681901 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-confd\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.681997 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-plugins-conf\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682041 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-tls\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682269 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-pod-info\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682330 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-plugins\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682402 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-erlang-cookie-secret\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682444 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-erlang-cookie\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682467 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-server-conf\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682493 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.682515 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2skm\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-kube-api-access-c2skm\") pod \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\" (UID: \"dd3a9c91-300c-4510-b7a4-03cf8cbbe729\") "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.685227 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.686558 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.686815 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.700841 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-pod-info" (OuterVolumeSpecName: "pod-info") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.703770 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.716062 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-kube-api-access-c2skm" (OuterVolumeSpecName: "kube-api-access-c2skm") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "kube-api-access-c2skm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.726982 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.728015 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785530 4634 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-pod-info\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785565 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785576 4634 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785587 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785621 4634 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785630 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2skm\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-kube-api-access-c2skm\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785643 4634 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.785652 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.819235 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-config-data" (OuterVolumeSpecName: "config-data") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.846235 4634 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc"
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.873047 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-server-conf" (OuterVolumeSpecName: "server-conf") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.886521 4634 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-server-conf\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.886562 4634 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.886571 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.900413 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "dd3a9c91-300c-4510-b7a4-03cf8cbbe729" (UID: "dd3a9c91-300c-4510-b7a4-03cf8cbbe729"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.962943 4634 generic.go:334] "Generic (PLEG): container finished" podID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerID="6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0" exitCode=0
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.963005 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd3a9c91-300c-4510-b7a4-03cf8cbbe729","Type":"ContainerDied","Data":"6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0"}
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.963048 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"dd3a9c91-300c-4510-b7a4-03cf8cbbe729","Type":"ContainerDied","Data":"1abbf3385ea639118b9fbee2443f63b1ed522e2a47f46814ebb5383a5b15aa5f"}
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.963124 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.963075 4634 scope.go:117] "RemoveContainer" containerID="6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0"
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.989971 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/dd3a9c91-300c-4510-b7a4-03cf8cbbe729-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:32 crc kubenswrapper[4634]: I0929 14:08:32.994998 4634 scope.go:117] "RemoveContainer" containerID="a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.024141 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.034372 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.056430 4634 scope.go:117] "RemoveContainer" containerID="6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.057071 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0\": container with ID starting with 6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0 not found: ID does not exist" containerID="6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.057317 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0"} err="failed to get container status \"6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0\": rpc error: code = NotFound desc = could not find container \"6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0\": container with ID starting with 6fb936f609668472e4029240bfc5679a89f3a9b8f98faa5bd9273fb3cefd19c0 not found: ID does not exist"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.057468 4634 scope.go:117] "RemoveContainer" containerID="a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.058014 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6\": container with ID starting with a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6 not found: ID does not exist" containerID="a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.058051 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6"} err="failed to get container status \"a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6\": rpc error: code = NotFound desc = could not find container \"a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6\": container with ID starting with a062729f8df0df2566711b720c6b0b54c5bbb0013955eae2749ac301a18f37e6 not found: ID does not exist"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.065733 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066243 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="extract-content"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066265 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="extract-content"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066291 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="rabbitmq"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066298 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="rabbitmq"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066312 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="registry-server"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066319 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="registry-server"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066337 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066343 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066358 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="setup-container"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066364 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="setup-container"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066378 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="extract-utilities"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066417 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="extract-utilities"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066427 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="extract-utilities"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066435 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="extract-utilities"
Sep 29 14:08:33 crc kubenswrapper[4634]: E0929 14:08:33.066454 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="extract-content"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066461 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="extract-content"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066668 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc3f0c2b-5ce6-4b7a-9094-9577887f53cd" containerName="registry-server"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066684 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" containerName="rabbitmq"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.066699 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d374a7a-9cdc-4308-a100-96b1efe56b6b" containerName="registry-server"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.068020 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.071344 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.071499 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.071948 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.072217 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.072379 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.072492 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.081290 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-4mq6l"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.105243 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.194843 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.194891 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-server-conf\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.194931 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/63bda06a-11bd-41fc-b988-30f1aa86b490-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.194957 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.195190 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/63bda06a-11bd-41fc-b988-30f1aa86b490-pod-info\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.195335 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-config-data\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.195477 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.195594 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.196004 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwt28\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-kube-api-access-lwt28\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.196258 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.196382 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.298635 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.298992 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.299069 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0"
\"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.299186 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-server-conf\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.299279 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/63bda06a-11bd-41fc-b988-30f1aa86b490-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.299359 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.299009 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.299544 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.300686 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.300859 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/63bda06a-11bd-41fc-b988-30f1aa86b490-pod-info\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.301058 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-config-data\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.301161 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-server-conf\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.301341 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.301476 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.301767 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwt28\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-kube-api-access-lwt28\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.301966 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/63bda06a-11bd-41fc-b988-30f1aa86b490-config-data\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.302335 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.311718 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/63bda06a-11bd-41fc-b988-30f1aa86b490-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.311797 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/63bda06a-11bd-41fc-b988-30f1aa86b490-pod-info\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.313630 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.315975 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.327025 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwt28\" (UniqueName: \"kubernetes.io/projected/63bda06a-11bd-41fc-b988-30f1aa86b490-kube-api-access-lwt28\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.356805 4634 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"63bda06a-11bd-41fc-b988-30f1aa86b490\") " pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.387424 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: W0929 14:08:33.924746 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63bda06a_11bd_41fc_b988_30f1aa86b490.slice/crio-a27c42cd89762beb24c1a80e035efc94152ddc7bd2fa86add90a808b11e52c56 WatchSource:0}: Error finding container a27c42cd89762beb24c1a80e035efc94152ddc7bd2fa86add90a808b11e52c56: Status 404 returned error can't find the container with id a27c42cd89762beb24c1a80e035efc94152ddc7bd2fa86add90a808b11e52c56 Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.929129 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.976328 4634 generic.go:334] "Generic (PLEG): container finished" podID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerID="6afea5db50bc1e064128175751bff751b672b7f268ac3699275fe23fdbe35322" exitCode=0 Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.976406 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8efec8a2-4905-4ba0-b777-d4e2cd393bd6","Type":"ContainerDied","Data":"6afea5db50bc1e064128175751bff751b672b7f268ac3699275fe23fdbe35322"} Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.976784 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8efec8a2-4905-4ba0-b777-d4e2cd393bd6","Type":"ContainerDied","Data":"ed85ee231c08597128292d93a4b45120124c478c093809e98205b6e341b55dd0"} Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.976940 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed85ee231c08597128292d93a4b45120124c478c093809e98205b6e341b55dd0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.977194 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 14:08:33 crc kubenswrapper[4634]: I0929 14:08:33.977834 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"63bda06a-11bd-41fc-b988-30f1aa86b490","Type":"ContainerStarted","Data":"a27c42cd89762beb24c1a80e035efc94152ddc7bd2fa86add90a808b11e52c56"} Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119227 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-config-data\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119307 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119378 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-pod-info\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119443 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-server-conf\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119474 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-erlang-cookie\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119565 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-erlang-cookie-secret\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119613 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-confd\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119690 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpnmv\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-kube-api-access-hpnmv\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119727 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-tls\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") " Sep 29 14:08:34 
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119763 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-plugins-conf\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") "
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.119873 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-plugins\") pod \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\" (UID: \"8efec8a2-4905-4ba0-b777-d4e2cd393bd6\") "
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.122815 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.139018 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.142320 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.144387 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.157937 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd3a9c91-300c-4510-b7a4-03cf8cbbe729" path="/var/lib/kubelet/pods/dd3a9c91-300c-4510-b7a4-03cf8cbbe729/volumes"
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.175430 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-pod-info" (OuterVolumeSpecName: "pod-info") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.175588 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.179722 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.181766 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-kube-api-access-hpnmv" (OuterVolumeSpecName: "kube-api-access-hpnmv") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "kube-api-access-hpnmv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.215778 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-config-data" (OuterVolumeSpecName: "config-data") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226839 4634 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226883 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpnmv\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-kube-api-access-hpnmv\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226893 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226903 4634 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226912 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226920 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226943 4634 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" "
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226952 4634 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-pod-info\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.226962 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.263947 4634 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc"
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.266605 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-server-conf" (OuterVolumeSpecName: "server-conf") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.329159 4634 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.329210 4634 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-server-conf\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.334530 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8efec8a2-4905-4ba0-b777-d4e2cd393bd6" (UID: "8efec8a2-4905-4ba0-b777-d4e2cd393bd6"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:08:34 crc kubenswrapper[4634]: I0929 14:08:34.431358 4634 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8efec8a2-4905-4ba0-b777-d4e2cd393bd6-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.006710 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.007052 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"63bda06a-11bd-41fc-b988-30f1aa86b490","Type":"ContainerStarted","Data":"e901e77a762f99a3c0668d6e3fddb84fd5aa24b902e3ba63585d30a2cd35804b"}
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.080195 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.096649 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.110708 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 14:08:35 crc kubenswrapper[4634]: E0929 14:08:35.111451 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerName="rabbitmq"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.111519 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerName="rabbitmq"
Sep 29 14:08:35 crc kubenswrapper[4634]: E0929 14:08:35.111588 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerName="setup-container"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.111636 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerName="setup-container"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.111911 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" containerName="rabbitmq"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.113057 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.120300 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.120345 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-dm6zl"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.120490 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.120321 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.121656 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.121810 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.126861 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.131465 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.249610 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.249675 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51c0f162-132f-48c2-8e8a-65c4c4d69c69-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.249744 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.249764 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.249838 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv5v5\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-kube-api-access-mv5v5\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.249874 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51c0f162-132f-48c2-8e8a-65c4c4d69c69-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.249896 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.250034 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.250065 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.250155 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.250377 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.352492 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.352555 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.352778 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353151 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353578 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353627 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353655 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51c0f162-132f-48c2-8e8a-65c4c4d69c69-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353691 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353620 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353716 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353739 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv5v5\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-kube-api-access-mv5v5\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353771 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51c0f162-132f-48c2-8e8a-65c4c4d69c69-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.353792 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.354109 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.355260 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.355656 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.355763 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/51c0f162-132f-48c2-8e8a-65c4c4d69c69-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.361165 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.361707 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/51c0f162-132f-48c2-8e8a-65c4c4d69c69-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.376875 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/51c0f162-132f-48c2-8e8a-65c4c4d69c69-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.377232 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.377307 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv5v5\" (UniqueName: \"kubernetes.io/projected/51c0f162-132f-48c2-8e8a-65c4c4d69c69-kube-api-access-mv5v5\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.390013 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"51c0f162-132f-48c2-8e8a-65c4c4d69c69\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.436796 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.731625 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-ckc7b"]
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.736656 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.749554 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.754327 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-ckc7b"]
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.833076 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.865698 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.865778 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-config\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.865832 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-svc\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.865892 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.865967 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-576zz\" (UniqueName: \"kubernetes.io/projected/499f1e83-1d21-4d45-8ed9-4496cdcfd278-kube-api-access-576zz\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.865998 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.866018 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.967828 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.968351 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-576zz\" (UniqueName: \"kubernetes.io/projected/499f1e83-1d21-4d45-8ed9-4496cdcfd278-kube-api-access-576zz\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.968388 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.968422 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.968464 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.968502 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-config\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.968549 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-svc\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.969730 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\"
(UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.969736 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-svc\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.969819 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-config\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.970373 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.972578 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.972584 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:35 crc kubenswrapper[4634]: I0929 14:08:35.989543 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-576zz\" (UniqueName: \"kubernetes.io/projected/499f1e83-1d21-4d45-8ed9-4496cdcfd278-kube-api-access-576zz\") pod \"dnsmasq-dns-67b789f86c-ckc7b\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") " pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:36 crc kubenswrapper[4634]: I0929 14:08:36.017282 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"51c0f162-132f-48c2-8e8a-65c4c4d69c69","Type":"ContainerStarted","Data":"2c146eeb9a5a9f0cdf83dfd5762e451d88bb5266afe4cdcc0a34f8820db23b6b"} Sep 29 14:08:36 crc kubenswrapper[4634]: I0929 14:08:36.074417 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:36 crc kubenswrapper[4634]: I0929 14:08:36.141063 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8efec8a2-4905-4ba0-b777-d4e2cd393bd6" path="/var/lib/kubelet/pods/8efec8a2-4905-4ba0-b777-d4e2cd393bd6/volumes" Sep 29 14:08:36 crc kubenswrapper[4634]: I0929 14:08:36.559481 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-ckc7b"] Sep 29 14:08:36 crc kubenswrapper[4634]: W0929 14:08:36.565279 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod499f1e83_1d21_4d45_8ed9_4496cdcfd278.slice/crio-6baa6ba8ea604d1625a0bda734f432123f38ab4c40afe658d34b61c54726172c WatchSource:0}: Error finding container 6baa6ba8ea604d1625a0bda734f432123f38ab4c40afe658d34b61c54726172c: Status 404 returned error can't find the container with id 6baa6ba8ea604d1625a0bda734f432123f38ab4c40afe658d34b61c54726172c Sep 29 14:08:37 crc kubenswrapper[4634]: I0929 14:08:37.031986 4634 generic.go:334] "Generic (PLEG): container finished" podID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerID="ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b" exitCode=0 Sep 29 14:08:37 crc kubenswrapper[4634]: I0929 14:08:37.032128 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" event={"ID":"499f1e83-1d21-4d45-8ed9-4496cdcfd278","Type":"ContainerDied","Data":"ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b"} Sep 29 14:08:37 crc kubenswrapper[4634]: I0929 14:08:37.032171 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" event={"ID":"499f1e83-1d21-4d45-8ed9-4496cdcfd278","Type":"ContainerStarted","Data":"6baa6ba8ea604d1625a0bda734f432123f38ab4c40afe658d34b61c54726172c"} Sep 29 14:08:37 crc kubenswrapper[4634]: I0929 14:08:37.039980 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"51c0f162-132f-48c2-8e8a-65c4c4d69c69","Type":"ContainerStarted","Data":"196652b6fe688f38b06699ad5e4776511234bd0b7f1e19fd7bc58c21d12bb582"} Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.062974 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" event={"ID":"499f1e83-1d21-4d45-8ed9-4496cdcfd278","Type":"ContainerStarted","Data":"0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822"} Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.066463 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.102047 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" podStartSLOduration=3.102021796 podStartE2EDuration="3.102021796s" podCreationTimestamp="2025-09-29 14:08:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:08:38.09690829 +0000 UTC m=+1448.665636049" watchObservedRunningTime="2025-09-29 14:08:38.102021796 +0000 UTC m=+1448.670749555" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.330023 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4zsn2"] Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.333592 4634 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.358259 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4zsn2"] Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.426385 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-utilities\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.426911 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-catalog-content\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.427097 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdcd2\" (UniqueName: \"kubernetes.io/projected/13b35f43-a76b-489a-be29-d8d4d3f4723d-kube-api-access-gdcd2\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.529723 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-catalog-content\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.529825 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdcd2\" (UniqueName: \"kubernetes.io/projected/13b35f43-a76b-489a-be29-d8d4d3f4723d-kube-api-access-gdcd2\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.529890 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-utilities\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.530729 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-catalog-content\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.530802 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-utilities\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.558177 4634 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdcd2\" (UniqueName: \"kubernetes.io/projected/13b35f43-a76b-489a-be29-d8d4d3f4723d-kube-api-access-gdcd2\") pod \"certified-operators-4zsn2\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:38 crc kubenswrapper[4634]: I0929 14:08:38.671480 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:39 crc kubenswrapper[4634]: I0929 14:08:39.223749 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4zsn2"] Sep 29 14:08:40 crc kubenswrapper[4634]: I0929 14:08:40.084638 4634 generic.go:334] "Generic (PLEG): container finished" podID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerID="1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148" exitCode=0 Sep 29 14:08:40 crc kubenswrapper[4634]: I0929 14:08:40.084758 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4zsn2" event={"ID":"13b35f43-a76b-489a-be29-d8d4d3f4723d","Type":"ContainerDied","Data":"1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148"} Sep 29 14:08:40 crc kubenswrapper[4634]: I0929 14:08:40.085129 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4zsn2" event={"ID":"13b35f43-a76b-489a-be29-d8d4d3f4723d","Type":"ContainerStarted","Data":"fed90779324d1d2e73f5a730b6d37bac776377410006a32f4bc5d2dc90dfd8b9"} Sep 29 14:08:42 crc kubenswrapper[4634]: I0929 14:08:42.127103 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4zsn2" event={"ID":"13b35f43-a76b-489a-be29-d8d4d3f4723d","Type":"ContainerStarted","Data":"434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e"} Sep 29 14:08:43 crc kubenswrapper[4634]: I0929 14:08:43.154195 4634 generic.go:334] "Generic (PLEG): container finished" podID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerID="434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e" exitCode=0 Sep 29 14:08:43 crc kubenswrapper[4634]: I0929 14:08:43.154612 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4zsn2" event={"ID":"13b35f43-a76b-489a-be29-d8d4d3f4723d","Type":"ContainerDied","Data":"434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e"} Sep 29 14:08:44 crc kubenswrapper[4634]: I0929 14:08:44.167000 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4zsn2" event={"ID":"13b35f43-a76b-489a-be29-d8d4d3f4723d","Type":"ContainerStarted","Data":"25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753"} Sep 29 14:08:44 crc kubenswrapper[4634]: I0929 14:08:44.191866 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4zsn2" podStartSLOduration=2.70631182 podStartE2EDuration="6.191838989s" podCreationTimestamp="2025-09-29 14:08:38 +0000 UTC" firstStartedPulling="2025-09-29 14:08:40.088055574 +0000 UTC m=+1450.656783323" lastFinishedPulling="2025-09-29 14:08:43.573582703 +0000 UTC m=+1454.142310492" observedRunningTime="2025-09-29 14:08:44.188586663 +0000 UTC m=+1454.757314412" watchObservedRunningTime="2025-09-29 14:08:44.191838989 +0000 UTC m=+1454.760566738" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.076453 4634 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.171492 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-5lnpd"] Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.172483 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" podUID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerName="dnsmasq-dns" containerID="cri-o://5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95" gracePeriod=10 Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.491555 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7b659bdd7f-jbsbr"] Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.493404 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.505414 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b659bdd7f-jbsbr"] Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.630821 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-ovsdbserver-sb\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.631228 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl2bt\" (UniqueName: \"kubernetes.io/projected/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-kube-api-access-wl2bt\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.631347 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-config\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.631398 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-dns-svc\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.631598 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-dns-swift-storage-0\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.631638 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-ovsdbserver-nb\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " 
pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.631668 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.733184 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-ovsdbserver-sb\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.733243 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl2bt\" (UniqueName: \"kubernetes.io/projected/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-kube-api-access-wl2bt\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.733304 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-config\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.733335 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-dns-svc\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.733427 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-dns-swift-storage-0\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.733453 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-ovsdbserver-nb\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.733476 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.734650 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " 
pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.735039 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-config\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.735261 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-dns-svc\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.735658 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-ovsdbserver-sb\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.735882 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-dns-swift-storage-0\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.736385 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-ovsdbserver-nb\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.769650 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl2bt\" (UniqueName: \"kubernetes.io/projected/8d1a9c90-4eaf-4553-b80b-2d608c11af9a-kube-api-access-wl2bt\") pod \"dnsmasq-dns-7b659bdd7f-jbsbr\" (UID: \"8d1a9c90-4eaf-4553-b80b-2d608c11af9a\") " pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.812555 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:46 crc kubenswrapper[4634]: I0929 14:08:46.934279 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.041071 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89xkr\" (UniqueName: \"kubernetes.io/projected/8a51b64e-3f78-4240-8c8d-3744e65046f5-kube-api-access-89xkr\") pod \"8a51b64e-3f78-4240-8c8d-3744e65046f5\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.041532 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-nb\") pod \"8a51b64e-3f78-4240-8c8d-3744e65046f5\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.041687 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-swift-storage-0\") pod \"8a51b64e-3f78-4240-8c8d-3744e65046f5\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.041739 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-svc\") pod \"8a51b64e-3f78-4240-8c8d-3744e65046f5\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.041786 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-sb\") pod \"8a51b64e-3f78-4240-8c8d-3744e65046f5\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.041807 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-config\") pod \"8a51b64e-3f78-4240-8c8d-3744e65046f5\" (UID: \"8a51b64e-3f78-4240-8c8d-3744e65046f5\") " Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.063722 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a51b64e-3f78-4240-8c8d-3744e65046f5-kube-api-access-89xkr" (OuterVolumeSpecName: "kube-api-access-89xkr") pod "8a51b64e-3f78-4240-8c8d-3744e65046f5" (UID: "8a51b64e-3f78-4240-8c8d-3744e65046f5"). InnerVolumeSpecName "kube-api-access-89xkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.067678 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89xkr\" (UniqueName: \"kubernetes.io/projected/8a51b64e-3f78-4240-8c8d-3744e65046f5-kube-api-access-89xkr\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.172519 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8a51b64e-3f78-4240-8c8d-3744e65046f5" (UID: "8a51b64e-3f78-4240-8c8d-3744e65046f5"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.187698 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8a51b64e-3f78-4240-8c8d-3744e65046f5" (UID: "8a51b64e-3f78-4240-8c8d-3744e65046f5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.215532 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-config" (OuterVolumeSpecName: "config") pod "8a51b64e-3f78-4240-8c8d-3744e65046f5" (UID: "8a51b64e-3f78-4240-8c8d-3744e65046f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.228021 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8a51b64e-3f78-4240-8c8d-3744e65046f5" (UID: "8a51b64e-3f78-4240-8c8d-3744e65046f5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.237104 4634 generic.go:334] "Generic (PLEG): container finished" podID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerID="5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95" exitCode=0 Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.237150 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" event={"ID":"8a51b64e-3f78-4240-8c8d-3744e65046f5","Type":"ContainerDied","Data":"5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95"} Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.237183 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" event={"ID":"8a51b64e-3f78-4240-8c8d-3744e65046f5","Type":"ContainerDied","Data":"7516b8a80f9deb388653a3663d0f0810866fb7b90c6fe36151c3635137a54e72"} Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.237203 4634 scope.go:117] "RemoveContainer" containerID="5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.237367 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-5lnpd" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.242541 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8a51b64e-3f78-4240-8c8d-3744e65046f5" (UID: "8a51b64e-3f78-4240-8c8d-3744e65046f5"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.271239 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.271278 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.271291 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.271299 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.271308 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a51b64e-3f78-4240-8c8d-3744e65046f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.287266 4634 scope.go:117] "RemoveContainer" containerID="c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.351648 4634 scope.go:117] "RemoveContainer" containerID="5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95" Sep 29 14:08:47 crc kubenswrapper[4634]: E0929 14:08:47.352230 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95\": container with ID starting with 5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95 not found: ID does not exist" containerID="5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.352302 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95"} err="failed to get container status \"5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95\": rpc error: code = NotFound desc = could not find container \"5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95\": container with ID starting with 5cbec68c8f6efc6f6f41252f537ac2c0897742a42fa51aca4be11877ecad4a95 not found: ID does not exist" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.352340 4634 scope.go:117] "RemoveContainer" containerID="c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b" Sep 29 14:08:47 crc kubenswrapper[4634]: E0929 14:08:47.352701 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b\": container with ID starting with c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b not found: ID does not exist" containerID="c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.352732 4634 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b"} err="failed to get container status \"c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b\": rpc error: code = NotFound desc = could not find container \"c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b\": container with ID starting with c406a76e41ce489aa87f8be2fea870454b2c5eeebd29f411ddfbb8af2e054e8b not found: ID does not exist" Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.465071 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7b659bdd7f-jbsbr"] Sep 29 14:08:47 crc kubenswrapper[4634]: W0929 14:08:47.473422 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d1a9c90_4eaf_4553_b80b_2d608c11af9a.slice/crio-269f5afc59812751134e82f0c2fe2c762f179b05dbc045ce4743348b628a382a WatchSource:0}: Error finding container 269f5afc59812751134e82f0c2fe2c762f179b05dbc045ce4743348b628a382a: Status 404 returned error can't find the container with id 269f5afc59812751134e82f0c2fe2c762f179b05dbc045ce4743348b628a382a Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.659568 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-5lnpd"] Sep 29 14:08:47 crc kubenswrapper[4634]: I0929 14:08:47.675805 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-5lnpd"] Sep 29 14:08:48 crc kubenswrapper[4634]: I0929 14:08:48.131472 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a51b64e-3f78-4240-8c8d-3744e65046f5" path="/var/lib/kubelet/pods/8a51b64e-3f78-4240-8c8d-3744e65046f5/volumes" Sep 29 14:08:48 crc kubenswrapper[4634]: I0929 14:08:48.254556 4634 generic.go:334] "Generic (PLEG): container finished" podID="8d1a9c90-4eaf-4553-b80b-2d608c11af9a" containerID="881339cbf45b32cdc0fde4fe8c5253b36ed53bd6a053afc30c9779965187db04" exitCode=0 Sep 29 14:08:48 crc kubenswrapper[4634]: I0929 14:08:48.254662 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" event={"ID":"8d1a9c90-4eaf-4553-b80b-2d608c11af9a","Type":"ContainerDied","Data":"881339cbf45b32cdc0fde4fe8c5253b36ed53bd6a053afc30c9779965187db04"} Sep 29 14:08:48 crc kubenswrapper[4634]: I0929 14:08:48.256388 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" event={"ID":"8d1a9c90-4eaf-4553-b80b-2d608c11af9a","Type":"ContainerStarted","Data":"269f5afc59812751134e82f0c2fe2c762f179b05dbc045ce4743348b628a382a"} Sep 29 14:08:48 crc kubenswrapper[4634]: I0929 14:08:48.671544 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:48 crc kubenswrapper[4634]: I0929 14:08:48.672100 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:48 crc kubenswrapper[4634]: I0929 14:08:48.727332 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:49 crc kubenswrapper[4634]: I0929 14:08:49.274318 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" event={"ID":"8d1a9c90-4eaf-4553-b80b-2d608c11af9a","Type":"ContainerStarted","Data":"5d9abee5ac7617c640989d72136c909ec9331bf58f5720c5f1a45fec99647822"} Sep 29 14:08:49 crc 
kubenswrapper[4634]: I0929 14:08:49.316269 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" podStartSLOduration=3.316244546 podStartE2EDuration="3.316244546s" podCreationTimestamp="2025-09-29 14:08:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:08:49.314980662 +0000 UTC m=+1459.883708451" watchObservedRunningTime="2025-09-29 14:08:49.316244546 +0000 UTC m=+1459.884972305" Sep 29 14:08:49 crc kubenswrapper[4634]: I0929 14:08:49.360875 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:49 crc kubenswrapper[4634]: I0929 14:08:49.426148 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4zsn2"] Sep 29 14:08:49 crc kubenswrapper[4634]: I0929 14:08:49.943775 4634 scope.go:117] "RemoveContainer" containerID="6afea5db50bc1e064128175751bff751b672b7f268ac3699275fe23fdbe35322" Sep 29 14:08:49 crc kubenswrapper[4634]: I0929 14:08:49.975651 4634 scope.go:117] "RemoveContainer" containerID="e0c764d7d4ed23a6a5c1128184699ea4593dc70f0f9b4134ef1f7d99594ef8a6" Sep 29 14:08:50 crc kubenswrapper[4634]: I0929 14:08:50.026266 4634 scope.go:117] "RemoveContainer" containerID="0617af5024010462e3ff97d997662259c347192a23b50e962760fa8e5e1b4604" Sep 29 14:08:50 crc kubenswrapper[4634]: I0929 14:08:50.282899 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr" Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.300787 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4zsn2" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="registry-server" containerID="cri-o://25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753" gracePeriod=2 Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.824984 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.908739 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-catalog-content\") pod \"13b35f43-a76b-489a-be29-d8d4d3f4723d\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.908892 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdcd2\" (UniqueName: \"kubernetes.io/projected/13b35f43-a76b-489a-be29-d8d4d3f4723d-kube-api-access-gdcd2\") pod \"13b35f43-a76b-489a-be29-d8d4d3f4723d\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.909073 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-utilities\") pod \"13b35f43-a76b-489a-be29-d8d4d3f4723d\" (UID: \"13b35f43-a76b-489a-be29-d8d4d3f4723d\") " Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.910813 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-utilities" (OuterVolumeSpecName: "utilities") pod "13b35f43-a76b-489a-be29-d8d4d3f4723d" (UID: "13b35f43-a76b-489a-be29-d8d4d3f4723d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.922526 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13b35f43-a76b-489a-be29-d8d4d3f4723d-kube-api-access-gdcd2" (OuterVolumeSpecName: "kube-api-access-gdcd2") pod "13b35f43-a76b-489a-be29-d8d4d3f4723d" (UID: "13b35f43-a76b-489a-be29-d8d4d3f4723d"). InnerVolumeSpecName "kube-api-access-gdcd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:08:51 crc kubenswrapper[4634]: I0929 14:08:51.978324 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "13b35f43-a76b-489a-be29-d8d4d3f4723d" (UID: "13b35f43-a76b-489a-be29-d8d4d3f4723d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.012642 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.012676 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13b35f43-a76b-489a-be29-d8d4d3f4723d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.012690 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdcd2\" (UniqueName: \"kubernetes.io/projected/13b35f43-a76b-489a-be29-d8d4d3f4723d-kube-api-access-gdcd2\") on node \"crc\" DevicePath \"\"" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.315621 4634 generic.go:334] "Generic (PLEG): container finished" podID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerID="25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753" exitCode=0 Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.315710 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4zsn2" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.315736 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4zsn2" event={"ID":"13b35f43-a76b-489a-be29-d8d4d3f4723d","Type":"ContainerDied","Data":"25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753"} Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.316155 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4zsn2" event={"ID":"13b35f43-a76b-489a-be29-d8d4d3f4723d","Type":"ContainerDied","Data":"fed90779324d1d2e73f5a730b6d37bac776377410006a32f4bc5d2dc90dfd8b9"} Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.316189 4634 scope.go:117] "RemoveContainer" containerID="25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.359354 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4zsn2"] Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.378643 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4zsn2"] Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.388131 4634 scope.go:117] "RemoveContainer" containerID="434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.435837 4634 scope.go:117] "RemoveContainer" containerID="1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.471977 4634 scope.go:117] "RemoveContainer" containerID="25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753" Sep 29 14:08:52 crc kubenswrapper[4634]: E0929 14:08:52.472565 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753\": container with ID starting with 25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753 not found: ID does not exist" containerID="25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753" Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.472629 
4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753"} err="failed to get container status \"25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753\": rpc error: code = NotFound desc = could not find container \"25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753\": container with ID starting with 25f832e4a26f667b65320b772ce464ddf74cec1b45c6812fb85516dae4a4c753 not found: ID does not exist"
Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.472662 4634 scope.go:117] "RemoveContainer" containerID="434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e"
Sep 29 14:08:52 crc kubenswrapper[4634]: E0929 14:08:52.473042 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e\": container with ID starting with 434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e not found: ID does not exist" containerID="434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e"
Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.473073 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e"} err="failed to get container status \"434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e\": rpc error: code = NotFound desc = could not find container \"434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e\": container with ID starting with 434343a618d98dc48dc93e12d2a7837ea85184b3655b1a8aa35544528ce5df0e not found: ID does not exist"
Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.473098 4634 scope.go:117] "RemoveContainer" containerID="1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148"
Sep 29 14:08:52 crc kubenswrapper[4634]: E0929 14:08:52.473381 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148\": container with ID starting with 1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148 not found: ID does not exist" containerID="1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148"
Sep 29 14:08:52 crc kubenswrapper[4634]: I0929 14:08:52.473431 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148"} err="failed to get container status \"1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148\": rpc error: code = NotFound desc = could not find container \"1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148\": container with ID starting with 1a904edba7d88a7954e9d6ab4ae69aac64b33c6b3ed414d4cd8fc4985ae31148 not found: ID does not exist"
Sep 29 14:08:54 crc kubenswrapper[4634]: I0929 14:08:54.133579 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" path="/var/lib/kubelet/pods/13b35f43-a76b-489a-be29-d8d4d3f4723d/volumes"
Sep 29 14:08:56 crc kubenswrapper[4634]: I0929 14:08:56.815427 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7b659bdd7f-jbsbr"
Sep 29 14:08:56 crc kubenswrapper[4634]: I0929 14:08:56.908316 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-ckc7b"]
Sep 29 14:08:56 crc kubenswrapper[4634]: I0929 14:08:56.908689 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" podUID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerName="dnsmasq-dns" containerID="cri-o://0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822" gracePeriod=10
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.382934 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.409065 4634 generic.go:334] "Generic (PLEG): container finished" podID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerID="0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822" exitCode=0
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.409163 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.409276 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" event={"ID":"499f1e83-1d21-4d45-8ed9-4496cdcfd278","Type":"ContainerDied","Data":"0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822"}
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.410301 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-ckc7b" event={"ID":"499f1e83-1d21-4d45-8ed9-4496cdcfd278","Type":"ContainerDied","Data":"6baa6ba8ea604d1625a0bda734f432123f38ab4c40afe658d34b61c54726172c"}
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.410407 4634 scope.go:117] "RemoveContainer" containerID="0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.452293 4634 scope.go:117] "RemoveContainer" containerID="ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.479690 4634 scope.go:117] "RemoveContainer" containerID="0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822"
Sep 29 14:08:57 crc kubenswrapper[4634]: E0929 14:08:57.480477 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822\": container with ID starting with 0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822 not found: ID does not exist" containerID="0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.480517 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822"} err="failed to get container status \"0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822\": rpc error: code = NotFound desc = could not find container \"0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822\": container with ID starting with 0c6a9d9a8048f52274deda3a3db93c5beb3b2c87861a8ed9d061ec923d6d3822 not found: ID does not exist"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.480551 4634 scope.go:117] "RemoveContainer" containerID="ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b"
Sep 29 14:08:57 crc kubenswrapper[4634]: E0929 14:08:57.480788 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b\": container with ID starting with ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b not found: ID does not exist" containerID="ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.480815 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b"} err="failed to get container status \"ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b\": rpc error: code = NotFound desc = could not find container \"ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b\": container with ID starting with ba7f7bf177aaf094366670081f8fd5cb2f8406db4ec84d50b480a473254d197b not found: ID does not exist"
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.564991 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-swift-storage-0\") pod \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") "
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.565049 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-svc\") pod \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") "
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.565070 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-sb\") pod \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") "
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.565174 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-nb\") pod \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") "
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.565235 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-openstack-edpm-ipam\") pod \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") "
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.565332 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-576zz\" (UniqueName: \"kubernetes.io/projected/499f1e83-1d21-4d45-8ed9-4496cdcfd278-kube-api-access-576zz\") pod \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") "
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.565397 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-config\") pod \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\" (UID: \"499f1e83-1d21-4d45-8ed9-4496cdcfd278\") "
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.586795 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/499f1e83-1d21-4d45-8ed9-4496cdcfd278-kube-api-access-576zz" (OuterVolumeSpecName: "kube-api-access-576zz") pod "499f1e83-1d21-4d45-8ed9-4496cdcfd278" (UID: "499f1e83-1d21-4d45-8ed9-4496cdcfd278"). InnerVolumeSpecName "kube-api-access-576zz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.646926 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "499f1e83-1d21-4d45-8ed9-4496cdcfd278" (UID: "499f1e83-1d21-4d45-8ed9-4496cdcfd278"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.648422 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "499f1e83-1d21-4d45-8ed9-4496cdcfd278" (UID: "499f1e83-1d21-4d45-8ed9-4496cdcfd278"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.650478 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "499f1e83-1d21-4d45-8ed9-4496cdcfd278" (UID: "499f1e83-1d21-4d45-8ed9-4496cdcfd278"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.653974 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "499f1e83-1d21-4d45-8ed9-4496cdcfd278" (UID: "499f1e83-1d21-4d45-8ed9-4496cdcfd278"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.658920 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "499f1e83-1d21-4d45-8ed9-4496cdcfd278" (UID: "499f1e83-1d21-4d45-8ed9-4496cdcfd278"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.668772 4634 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.668803 4634 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.668817 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.668830 4634 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.668842 4634 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.668851 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-576zz\" (UniqueName: \"kubernetes.io/projected/499f1e83-1d21-4d45-8ed9-4496cdcfd278-kube-api-access-576zz\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.682237 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-config" (OuterVolumeSpecName: "config") pod "499f1e83-1d21-4d45-8ed9-4496cdcfd278" (UID: "499f1e83-1d21-4d45-8ed9-4496cdcfd278"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.749267 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-ckc7b"]
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.761576 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-ckc7b"]
Sep 29 14:08:57 crc kubenswrapper[4634]: I0929 14:08:57.769598 4634 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/499f1e83-1d21-4d45-8ed9-4496cdcfd278-config\") on node \"crc\" DevicePath \"\""
Sep 29 14:08:58 crc kubenswrapper[4634]: I0929 14:08:58.123482 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" path="/var/lib/kubelet/pods/499f1e83-1d21-4d45-8ed9-4496cdcfd278/volumes"
Sep 29 14:09:05 crc kubenswrapper[4634]: I0929 14:09:05.541463 4634 generic.go:334] "Generic (PLEG): container finished" podID="63bda06a-11bd-41fc-b988-30f1aa86b490" containerID="e901e77a762f99a3c0668d6e3fddb84fd5aa24b902e3ba63585d30a2cd35804b" exitCode=0
Sep 29 14:09:05 crc kubenswrapper[4634]: I0929 14:09:05.541628 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"63bda06a-11bd-41fc-b988-30f1aa86b490","Type":"ContainerDied","Data":"e901e77a762f99a3c0668d6e3fddb84fd5aa24b902e3ba63585d30a2cd35804b"}
Sep 29 14:09:06 crc kubenswrapper[4634]: I0929 14:09:06.563936 4634 generic.go:334] "Generic (PLEG): container finished" podID="51c0f162-132f-48c2-8e8a-65c4c4d69c69" containerID="196652b6fe688f38b06699ad5e4776511234bd0b7f1e19fd7bc58c21d12bb582" exitCode=0
Sep 29 14:09:06 crc kubenswrapper[4634]: I0929 14:09:06.564055 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"51c0f162-132f-48c2-8e8a-65c4c4d69c69","Type":"ContainerDied","Data":"196652b6fe688f38b06699ad5e4776511234bd0b7f1e19fd7bc58c21d12bb582"}
Sep 29 14:09:06 crc kubenswrapper[4634]: I0929 14:09:06.568450 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"63bda06a-11bd-41fc-b988-30f1aa86b490","Type":"ContainerStarted","Data":"1d78173d72779e1937a051a8efabc46aab6d31b6e3f39e3e76810627c6c468cd"}
Sep 29 14:09:06 crc kubenswrapper[4634]: I0929 14:09:06.568898 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Sep 29 14:09:07 crc kubenswrapper[4634]: I0929 14:09:07.586832 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"51c0f162-132f-48c2-8e8a-65c4c4d69c69","Type":"ContainerStarted","Data":"d23d70fc66a4512b2c47ab87fea0be60e29dad705fa7f762ea2fa4af306fba3d"}
Sep 29 14:09:07 crc kubenswrapper[4634]: I0929 14:09:07.587411 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:09:07 crc kubenswrapper[4634]: I0929 14:09:07.614803 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=34.614774901 podStartE2EDuration="34.614774901s" podCreationTimestamp="2025-09-29 14:08:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:09:06.658447935 +0000 UTC m=+1477.227175684" watchObservedRunningTime="2025-09-29 14:09:07.614774901 +0000 UTC m=+1478.183502650"
Sep 29 14:09:07 crc kubenswrapper[4634]: I0929 14:09:07.617793 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=32.617779861 podStartE2EDuration="32.617779861s" podCreationTimestamp="2025-09-29 14:08:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:09:07.609334416 +0000 UTC m=+1478.178062165" watchObservedRunningTime="2025-09-29 14:09:07.617779861 +0000 UTC m=+1478.186507630"
Sep 29 14:09:14 crc kubenswrapper[4634]: I0929 14:09:14.396193 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:09:14 crc kubenswrapper[4634]: I0929 14:09:14.396797 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.695898 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"]
Sep 29 14:09:16 crc kubenswrapper[4634]: E0929 14:09:16.696693 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="extract-utilities"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696706 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="extract-utilities"
Sep 29 14:09:16 crc kubenswrapper[4634]: E0929 14:09:16.696725 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerName="init"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696731 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerName="init"
Sep 29 14:09:16 crc kubenswrapper[4634]: E0929 14:09:16.696745 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="registry-server"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696751 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="registry-server"
Sep 29 14:09:16 crc kubenswrapper[4634]: E0929 14:09:16.696763 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerName="dnsmasq-dns"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696769 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerName="dnsmasq-dns"
Sep 29 14:09:16 crc kubenswrapper[4634]: E0929 14:09:16.696784 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="extract-content"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696790 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="extract-content"
Sep 29 14:09:16 crc kubenswrapper[4634]: E0929 14:09:16.696802 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerName="dnsmasq-dns"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696808 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerName="dnsmasq-dns"
Sep 29 14:09:16 crc kubenswrapper[4634]: E0929 14:09:16.696829 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerName="init"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696835 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerName="init"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.696998 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="499f1e83-1d21-4d45-8ed9-4496cdcfd278" containerName="dnsmasq-dns"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.697017 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="13b35f43-a76b-489a-be29-d8d4d3f4723d" containerName="registry-server"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.697038 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a51b64e-3f78-4240-8c8d-3744e65046f5" containerName="dnsmasq-dns"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.697797 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.700554 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.702783 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.702915 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.703032 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.711200 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"]
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.712736 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.712792 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9792\" (UniqueName: \"kubernetes.io/projected/61b3895b-871d-4318-8fb4-4426fcd6611a-kube-api-access-x9792\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.712849 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.712999 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.814965 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.815094 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9792\" (UniqueName: \"kubernetes.io/projected/61b3895b-871d-4318-8fb4-4426fcd6611a-kube-api-access-x9792\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.815166 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.815264 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.821582 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.822946 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.836812 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:16 crc kubenswrapper[4634]: I0929 14:09:16.839212 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9792\" (UniqueName: \"kubernetes.io/projected/61b3895b-871d-4318-8fb4-4426fcd6611a-kube-api-access-x9792\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:17 crc kubenswrapper[4634]: I0929 14:09:17.017202 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:18 crc kubenswrapper[4634]: I0929 14:09:18.024133 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"]
Sep 29 14:09:18 crc kubenswrapper[4634]: I0929 14:09:18.700573 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj" event={"ID":"61b3895b-871d-4318-8fb4-4426fcd6611a","Type":"ContainerStarted","Data":"a0cc93bfad2c3471a0bea0a229833e00f15b9b16380b449b4c8db089a0527acb"}
Sep 29 14:09:23 crc kubenswrapper[4634]: I0929 14:09:23.390260 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.297196 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bj8"]
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.305751 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.351457 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bj8"]
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.448401 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.450199 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-catalog-content\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.450354 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rwcz\" (UniqueName: \"kubernetes.io/projected/1677e589-df70-4584-8d02-1f905e7a2d15-kube-api-access-5rwcz\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.450413 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-utilities\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.553751 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-catalog-content\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.553846 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rwcz\" (UniqueName: \"kubernetes.io/projected/1677e589-df70-4584-8d02-1f905e7a2d15-kube-api-access-5rwcz\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.553908 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-utilities\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.554616 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-utilities\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.554880 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-catalog-content\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.716637 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rwcz\" (UniqueName: \"kubernetes.io/projected/1677e589-df70-4584-8d02-1f905e7a2d15-kube-api-access-5rwcz\") pod \"redhat-marketplace-k8bj8\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") " pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:25 crc kubenswrapper[4634]: I0929 14:09:25.947825 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:32 crc kubenswrapper[4634]: I0929 14:09:32.230327 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 14:09:32 crc kubenswrapper[4634]: I0929 14:09:32.275976 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bj8"]
Sep 29 14:09:32 crc kubenswrapper[4634]: W0929 14:09:32.280352 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1677e589_df70_4584_8d02_1f905e7a2d15.slice/crio-16207870bb8051392073045a42adeed914123ac035112d5ad2251f67fc409580 WatchSource:0}: Error finding container 16207870bb8051392073045a42adeed914123ac035112d5ad2251f67fc409580: Status 404 returned error can't find the container with id 16207870bb8051392073045a42adeed914123ac035112d5ad2251f67fc409580
Sep 29 14:09:32 crc kubenswrapper[4634]: I0929 14:09:32.884516 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj" event={"ID":"61b3895b-871d-4318-8fb4-4426fcd6611a","Type":"ContainerStarted","Data":"55493118f83cf9d96c6e1065f28d13a9b434ad42e30b5974948fb5410a9084ee"}
Sep 29 14:09:32 crc kubenswrapper[4634]: I0929 14:09:32.887024 4634 generic.go:334] "Generic (PLEG): container finished" podID="1677e589-df70-4584-8d02-1f905e7a2d15" containerID="db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560" exitCode=0
Sep 29 14:09:32 crc kubenswrapper[4634]: I0929 14:09:32.887172 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bj8" event={"ID":"1677e589-df70-4584-8d02-1f905e7a2d15","Type":"ContainerDied","Data":"db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560"}
Sep 29 14:09:32 crc kubenswrapper[4634]: I0929 14:09:32.887262 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bj8" event={"ID":"1677e589-df70-4584-8d02-1f905e7a2d15","Type":"ContainerStarted","Data":"16207870bb8051392073045a42adeed914123ac035112d5ad2251f67fc409580"}
Sep 29 14:09:32 crc kubenswrapper[4634]: I0929 14:09:32.917156 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj" podStartSLOduration=2.737627244 podStartE2EDuration="16.917131939s" podCreationTimestamp="2025-09-29 14:09:16 +0000 UTC" firstStartedPulling="2025-09-29 14:09:18.048001236 +0000 UTC m=+1488.616728985" lastFinishedPulling="2025-09-29 14:09:32.227505931 +0000 UTC m=+1502.796233680" observedRunningTime="2025-09-29 14:09:32.907893132 +0000 UTC m=+1503.476620901" watchObservedRunningTime="2025-09-29 14:09:32.917131939 +0000 UTC m=+1503.485859698"
Sep 29 14:09:33 crc kubenswrapper[4634]: I0929 14:09:33.899632 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bj8" event={"ID":"1677e589-df70-4584-8d02-1f905e7a2d15","Type":"ContainerStarted","Data":"a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202"}
Sep 29 14:09:34 crc kubenswrapper[4634]: I0929 14:09:34.911367 4634 generic.go:334] "Generic (PLEG): container finished" podID="1677e589-df70-4584-8d02-1f905e7a2d15" containerID="a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202" exitCode=0
Sep 29 14:09:34 crc kubenswrapper[4634]: I0929 14:09:34.911505 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bj8" event={"ID":"1677e589-df70-4584-8d02-1f905e7a2d15","Type":"ContainerDied","Data":"a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202"}
Sep 29 14:09:35 crc kubenswrapper[4634]: I0929 14:09:35.923892 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bj8" event={"ID":"1677e589-df70-4584-8d02-1f905e7a2d15","Type":"ContainerStarted","Data":"abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f"}
Sep 29 14:09:35 crc kubenswrapper[4634]: I0929 14:09:35.943043 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k8bj8" podStartSLOduration=8.145216728 podStartE2EDuration="10.943018279s" podCreationTimestamp="2025-09-29 14:09:25 +0000 UTC" firstStartedPulling="2025-09-29 14:09:32.888779134 +0000 UTC m=+1503.457506873" lastFinishedPulling="2025-09-29 14:09:35.686580675 +0000 UTC m=+1506.255308424" observedRunningTime="2025-09-29 14:09:35.940017569 +0000 UTC m=+1506.508745338" watchObservedRunningTime="2025-09-29 14:09:35.943018279 +0000 UTC m=+1506.511746038"
Sep 29 14:09:35 crc kubenswrapper[4634]: I0929 14:09:35.948407 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:35 crc kubenswrapper[4634]: I0929 14:09:35.950382 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:37 crc kubenswrapper[4634]: I0929 14:09:37.003551 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-k8bj8" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="registry-server" probeResult="failure" output=<
Sep 29 14:09:37 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s
Sep 29 14:09:37 crc kubenswrapper[4634]: >
Sep 29 14:09:44 crc kubenswrapper[4634]: I0929 14:09:44.396172 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:09:44 crc kubenswrapper[4634]: I0929 14:09:44.397213 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:09:45 crc kubenswrapper[4634]: I0929 14:09:45.043652 4634 generic.go:334] "Generic (PLEG): container finished" podID="61b3895b-871d-4318-8fb4-4426fcd6611a" containerID="55493118f83cf9d96c6e1065f28d13a9b434ad42e30b5974948fb5410a9084ee" exitCode=0
Sep 29 14:09:45 crc kubenswrapper[4634]: I0929 14:09:45.043709 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj" event={"ID":"61b3895b-871d-4318-8fb4-4426fcd6611a","Type":"ContainerDied","Data":"55493118f83cf9d96c6e1065f28d13a9b434ad42e30b5974948fb5410a9084ee"}
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.017829 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.091298 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.262757 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bj8"]
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.638848 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.778677 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-ssh-key\") pod \"61b3895b-871d-4318-8fb4-4426fcd6611a\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") "
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.779184 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-inventory\") pod \"61b3895b-871d-4318-8fb4-4426fcd6611a\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") "
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.779243 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-repo-setup-combined-ca-bundle\") pod \"61b3895b-871d-4318-8fb4-4426fcd6611a\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") "
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.779323 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9792\" (UniqueName: \"kubernetes.io/projected/61b3895b-871d-4318-8fb4-4426fcd6611a-kube-api-access-x9792\") pod \"61b3895b-871d-4318-8fb4-4426fcd6611a\" (UID: \"61b3895b-871d-4318-8fb4-4426fcd6611a\") "
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.789504 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "61b3895b-871d-4318-8fb4-4426fcd6611a" (UID: "61b3895b-871d-4318-8fb4-4426fcd6611a"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.795483 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61b3895b-871d-4318-8fb4-4426fcd6611a-kube-api-access-x9792" (OuterVolumeSpecName: "kube-api-access-x9792") pod "61b3895b-871d-4318-8fb4-4426fcd6611a" (UID: "61b3895b-871d-4318-8fb4-4426fcd6611a"). InnerVolumeSpecName "kube-api-access-x9792". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.812013 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-inventory" (OuterVolumeSpecName: "inventory") pod "61b3895b-871d-4318-8fb4-4426fcd6611a" (UID: "61b3895b-871d-4318-8fb4-4426fcd6611a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.827249 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61b3895b-871d-4318-8fb4-4426fcd6611a" (UID: "61b3895b-871d-4318-8fb4-4426fcd6611a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.882367 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.882416 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.882431 4634 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61b3895b-871d-4318-8fb4-4426fcd6611a-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:46 crc kubenswrapper[4634]: I0929 14:09:46.882448 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9792\" (UniqueName: \"kubernetes.io/projected/61b3895b-871d-4318-8fb4-4426fcd6611a-kube-api-access-x9792\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.074770 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj" event={"ID":"61b3895b-871d-4318-8fb4-4426fcd6611a","Type":"ContainerDied","Data":"a0cc93bfad2c3471a0bea0a229833e00f15b9b16380b449b4c8db089a0527acb"}
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.074816 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.074842 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0cc93bfad2c3471a0bea0a229833e00f15b9b16380b449b4c8db089a0527acb"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.075036 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k8bj8" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="registry-server" containerID="cri-o://abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f" gracePeriod=2
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.166190 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"]
Sep 29 14:09:47 crc kubenswrapper[4634]: E0929 14:09:47.166656 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61b3895b-871d-4318-8fb4-4426fcd6611a" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.166680 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="61b3895b-871d-4318-8fb4-4426fcd6611a" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.166910 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="61b3895b-871d-4318-8fb4-4426fcd6611a" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.167792 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.171300 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.171816 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.171997 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.172177 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.215845 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"]
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.300856 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.300934 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jh6n\" (UniqueName: \"kubernetes.io/projected/8043163a-b8ee-4991-9edb-8c7522be414e-kube-api-access-4jh6n\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.301107 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.421701 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.421813 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jh6n\" (UniqueName: \"kubernetes.io/projected/8043163a-b8ee-4991-9edb-8c7522be414e-kube-api-access-4jh6n\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.421854 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.429979 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.445725 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.455399 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jh6n\" (UniqueName: \"kubernetes.io/projected/8043163a-b8ee-4991-9edb-8c7522be414e-kube-api-access-4jh6n\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-phrjw\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.498336 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.588813 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.628651 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rwcz\" (UniqueName: \"kubernetes.io/projected/1677e589-df70-4584-8d02-1f905e7a2d15-kube-api-access-5rwcz\") pod \"1677e589-df70-4584-8d02-1f905e7a2d15\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") "
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.628793 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-utilities\") pod \"1677e589-df70-4584-8d02-1f905e7a2d15\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") "
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.629076 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-catalog-content\") pod \"1677e589-df70-4584-8d02-1f905e7a2d15\" (UID: \"1677e589-df70-4584-8d02-1f905e7a2d15\") "
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.632412 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-utilities" (OuterVolumeSpecName: "utilities") pod "1677e589-df70-4584-8d02-1f905e7a2d15" (UID: "1677e589-df70-4584-8d02-1f905e7a2d15"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.636477 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1677e589-df70-4584-8d02-1f905e7a2d15-kube-api-access-5rwcz" (OuterVolumeSpecName: "kube-api-access-5rwcz") pod "1677e589-df70-4584-8d02-1f905e7a2d15" (UID: "1677e589-df70-4584-8d02-1f905e7a2d15"). InnerVolumeSpecName "kube-api-access-5rwcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.647524 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1677e589-df70-4584-8d02-1f905e7a2d15" (UID: "1677e589-df70-4584-8d02-1f905e7a2d15"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.731628 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.731658 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1677e589-df70-4584-8d02-1f905e7a2d15-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:47 crc kubenswrapper[4634]: I0929 14:09:47.731670 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rwcz\" (UniqueName: \"kubernetes.io/projected/1677e589-df70-4584-8d02-1f905e7a2d15-kube-api-access-5rwcz\") on node \"crc\" DevicePath \"\""
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.087336 4634 generic.go:334] "Generic (PLEG): container finished" podID="1677e589-df70-4584-8d02-1f905e7a2d15" containerID="abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f" exitCode=0
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.087459 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k8bj8"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.087457 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bj8" event={"ID":"1677e589-df70-4584-8d02-1f905e7a2d15","Type":"ContainerDied","Data":"abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f"}
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.088891 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k8bj8" event={"ID":"1677e589-df70-4584-8d02-1f905e7a2d15","Type":"ContainerDied","Data":"16207870bb8051392073045a42adeed914123ac035112d5ad2251f67fc409580"}
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.088927 4634 scope.go:117] "RemoveContainer" containerID="abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.145362 4634 scope.go:117] "RemoveContainer" containerID="a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.156930 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bj8"]
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.167112 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k8bj8"]
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.202995 4634 scope.go:117] "RemoveContainer" containerID="db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.266711 4634 scope.go:117] "RemoveContainer" containerID="abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f"
Sep 29 14:09:48 crc kubenswrapper[4634]: E0929 14:09:48.267349 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f\": container with ID starting with abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f not found: ID does not exist" containerID="abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.267405 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f"} err="failed to get container status \"abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f\": rpc error: code = NotFound desc = could not find container \"abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f\": container with ID starting with abcd57a7f55d3b8b8aef2d74ddfb4334f4bb735a613ab5e8221a04de4e02b80f not found: ID does not exist"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.267438 4634 scope.go:117] "RemoveContainer" containerID="a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202"
Sep 29 14:09:48 crc kubenswrapper[4634]: E0929 14:09:48.268755 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202\": container with ID starting with a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202 not found: ID does not exist" containerID="a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.268813 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202"} err="failed to get container status \"a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202\": rpc error: code = NotFound desc = could not find container \"a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202\": container with ID starting with a9ed9d8f91212df0884100283d7b7f4200b30a9cb0ebf5187ea0e0510a8d9202 not found: ID does not exist"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.268854 4634 scope.go:117] "RemoveContainer" containerID="db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560"
Sep 29 14:09:48 crc kubenswrapper[4634]: E0929 14:09:48.270007 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560\": container with ID starting with db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560 not found: ID does not exist" containerID="db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560"
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.270053 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560"} err="failed to get container status \"db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560\": rpc error: code = NotFound desc = could not find container \"db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560\": container with ID starting with db53da1bc8745b0a77d9a207edbdf95873016bf03e1882295ffce0342e10a560 not found: ID does not exist"
Sep 29 14:09:48 crc kubenswrapper[4634]: W0929 14:09:48.282313 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8043163a_b8ee_4991_9edb_8c7522be414e.slice/crio-ac0737d7512664c1ee382b815be16d11d96fc92cd1f3cff637e2383dfb7cf110 WatchSource:0}: Error finding container ac0737d7512664c1ee382b815be16d11d96fc92cd1f3cff637e2383dfb7cf110: Status 404 returned error can't find the container with id ac0737d7512664c1ee382b815be16d11d96fc92cd1f3cff637e2383dfb7cf110
Sep 29 14:09:48 crc kubenswrapper[4634]: I0929 14:09:48.295026 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"]
Sep 29 14:09:49 crc kubenswrapper[4634]: I0929 14:09:49.116941 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw" event={"ID":"8043163a-b8ee-4991-9edb-8c7522be414e","Type":"ContainerStarted","Data":"8cd148017e28fee82fe3705788d6506c9e5544a6c564e1b376cb18522a83f202"}
Sep 29 14:09:49 crc kubenswrapper[4634]: I0929 14:09:49.117271 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw" event={"ID":"8043163a-b8ee-4991-9edb-8c7522be414e","Type":"ContainerStarted","Data":"ac0737d7512664c1ee382b815be16d11d96fc92cd1f3cff637e2383dfb7cf110"}
Sep 29 14:09:49 crc kubenswrapper[4634]: I0929 14:09:49.154721 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw" podStartSLOduration=1.934711837 podStartE2EDuration="2.154699089s" podCreationTimestamp="2025-09-29 14:09:47 +0000 UTC" firstStartedPulling="2025-09-29 14:09:48.284956422 +0000 UTC m=+1518.853684171" lastFinishedPulling="2025-09-29 14:09:48.504943674 +0000 UTC m=+1519.073671423" observedRunningTime="2025-09-29 14:09:49.143019828 +0000 UTC m=+1519.711747577" watchObservedRunningTime="2025-09-29 14:09:49.154699089 +0000 UTC m=+1519.723426838"
Sep 29 14:09:50 crc kubenswrapper[4634]: I0929 14:09:50.120701 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" path="/var/lib/kubelet/pods/1677e589-df70-4584-8d02-1f905e7a2d15/volumes"
Sep 29 14:09:50 crc kubenswrapper[4634]: I0929 14:09:50.285005 4634 scope.go:117] "RemoveContainer" containerID="5b474e7fdb952612e113145a450b9833461dac6d9cfe3d0d7b72c8656be5cf77"
Sep 29 14:09:50 crc kubenswrapper[4634]: I0929 14:09:50.349715 4634 scope.go:117] "RemoveContainer" containerID="7232fd46d12e34755e56d2296a450516e7e42381703498145e70cb5444166dcd"
Sep 29 14:09:50 crc kubenswrapper[4634]: I0929 14:09:50.465021 4634 scope.go:117] "RemoveContainer" containerID="ed079008aeefce5858d982d3d33a2d5b455bd2a86cbe956c8de628b65a9b354a"
Sep 29 14:09:50 crc kubenswrapper[4634]: I0929 14:09:50.516979 4634 scope.go:117] "RemoveContainer" containerID="11b6ffaf90198e70594a39598993c1923da047313ee98868ee82c0109cd05fea"
Sep 29 14:09:52 crc kubenswrapper[4634]: I0929 14:09:52.152464 4634 generic.go:334] "Generic (PLEG): container finished" podID="8043163a-b8ee-4991-9edb-8c7522be414e" containerID="8cd148017e28fee82fe3705788d6506c9e5544a6c564e1b376cb18522a83f202" exitCode=0
Sep 29 14:09:52 crc kubenswrapper[4634]: I0929 14:09:52.152512 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw" event={"ID":"8043163a-b8ee-4991-9edb-8c7522be414e","Type":"ContainerDied","Data":"8cd148017e28fee82fe3705788d6506c9e5544a6c564e1b376cb18522a83f202"}
Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.595419 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw"
Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.682623 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-inventory\") pod \"8043163a-b8ee-4991-9edb-8c7522be414e\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") "
Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.683001 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jh6n\" (UniqueName: \"kubernetes.io/projected/8043163a-b8ee-4991-9edb-8c7522be414e-kube-api-access-4jh6n\") pod \"8043163a-b8ee-4991-9edb-8c7522be414e\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") "
Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.683159 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-ssh-key\") pod \"8043163a-b8ee-4991-9edb-8c7522be414e\" (UID: \"8043163a-b8ee-4991-9edb-8c7522be414e\") "
Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.696026 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8043163a-b8ee-4991-9edb-8c7522be414e-kube-api-access-4jh6n" (OuterVolumeSpecName: "kube-api-access-4jh6n") pod "8043163a-b8ee-4991-9edb-8c7522be414e" (UID: "8043163a-b8ee-4991-9edb-8c7522be414e"). InnerVolumeSpecName "kube-api-access-4jh6n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.713789 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8043163a-b8ee-4991-9edb-8c7522be414e" (UID: "8043163a-b8ee-4991-9edb-8c7522be414e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.717966 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-inventory" (OuterVolumeSpecName: "inventory") pod "8043163a-b8ee-4991-9edb-8c7522be414e" (UID: "8043163a-b8ee-4991-9edb-8c7522be414e"). InnerVolumeSpecName "inventory".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.786249 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jh6n\" (UniqueName: \"kubernetes.io/projected/8043163a-b8ee-4991-9edb-8c7522be414e-kube-api-access-4jh6n\") on node \"crc\" DevicePath \"\"" Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.786284 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:09:53 crc kubenswrapper[4634]: I0929 14:09:53.786296 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8043163a-b8ee-4991-9edb-8c7522be414e-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.175204 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw" event={"ID":"8043163a-b8ee-4991-9edb-8c7522be414e","Type":"ContainerDied","Data":"ac0737d7512664c1ee382b815be16d11d96fc92cd1f3cff637e2383dfb7cf110"} Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.175672 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac0737d7512664c1ee382b815be16d11d96fc92cd1f3cff637e2383dfb7cf110" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.175240 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-phrjw" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.331524 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf"] Sep 29 14:09:54 crc kubenswrapper[4634]: E0929 14:09:54.332005 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8043163a-b8ee-4991-9edb-8c7522be414e" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.332028 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8043163a-b8ee-4991-9edb-8c7522be414e" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 14:09:54 crc kubenswrapper[4634]: E0929 14:09:54.332048 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="extract-content" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.332055 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="extract-content" Sep 29 14:09:54 crc kubenswrapper[4634]: E0929 14:09:54.332113 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="extract-utilities" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.332121 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="extract-utilities" Sep 29 14:09:54 crc kubenswrapper[4634]: E0929 14:09:54.332138 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="registry-server" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.332144 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="registry-server" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.332317 4634 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="1677e589-df70-4584-8d02-1f905e7a2d15" containerName="registry-server" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.332344 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="8043163a-b8ee-4991-9edb-8c7522be414e" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.333115 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.338257 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.338563 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.338725 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.338872 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.347389 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf"] Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.399452 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.399512 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnl5w\" (UniqueName: \"kubernetes.io/projected/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-kube-api-access-pnl5w\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.399539 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.399664 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.502108 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" 
(UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.502562 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.502712 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.502816 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnl5w\" (UniqueName: \"kubernetes.io/projected/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-kube-api-access-pnl5w\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.506151 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.506442 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.519190 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.519681 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnl5w\" (UniqueName: \"kubernetes.io/projected/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-kube-api-access-pnl5w\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:54 crc kubenswrapper[4634]: I0929 14:09:54.707676 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:09:55 crc kubenswrapper[4634]: W0929 14:09:55.278453 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3a5bac_db09_4bee_bc1a_a93841ada5ed.slice/crio-53bfdae0307c82b5c61e2aa52824f4129aa2122a55e310ee3c63628476e62271 WatchSource:0}: Error finding container 53bfdae0307c82b5c61e2aa52824f4129aa2122a55e310ee3c63628476e62271: Status 404 returned error can't find the container with id 53bfdae0307c82b5c61e2aa52824f4129aa2122a55e310ee3c63628476e62271 Sep 29 14:09:55 crc kubenswrapper[4634]: I0929 14:09:55.293283 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf"] Sep 29 14:09:56 crc kubenswrapper[4634]: I0929 14:09:56.201593 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" event={"ID":"5e3a5bac-db09-4bee-bc1a-a93841ada5ed","Type":"ContainerStarted","Data":"4373aea3aaff6077ba386b8f9e1fd475643acd205c1c702c98b5f4496421f56c"} Sep 29 14:09:56 crc kubenswrapper[4634]: I0929 14:09:56.202030 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" event={"ID":"5e3a5bac-db09-4bee-bc1a-a93841ada5ed","Type":"ContainerStarted","Data":"53bfdae0307c82b5c61e2aa52824f4129aa2122a55e310ee3c63628476e62271"} Sep 29 14:09:56 crc kubenswrapper[4634]: I0929 14:09:56.240915 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" podStartSLOduration=2.057400185 podStartE2EDuration="2.240888246s" podCreationTimestamp="2025-09-29 14:09:54 +0000 UTC" firstStartedPulling="2025-09-29 14:09:55.282141514 +0000 UTC m=+1525.850869263" lastFinishedPulling="2025-09-29 14:09:55.465629575 +0000 UTC m=+1526.034357324" observedRunningTime="2025-09-29 14:09:56.229927964 +0000 UTC m=+1526.798655713" watchObservedRunningTime="2025-09-29 14:09:56.240888246 +0000 UTC m=+1526.809616005" Sep 29 14:10:14 crc kubenswrapper[4634]: I0929 14:10:14.396135 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:10:14 crc kubenswrapper[4634]: I0929 14:10:14.396671 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:10:14 crc kubenswrapper[4634]: I0929 14:10:14.396728 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:10:14 crc kubenswrapper[4634]: I0929 14:10:14.397692 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:10:14 crc 
kubenswrapper[4634]: I0929 14:10:14.397752 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" gracePeriod=600 Sep 29 14:10:14 crc kubenswrapper[4634]: E0929 14:10:14.535292 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:10:15 crc kubenswrapper[4634]: I0929 14:10:15.390903 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" exitCode=0 Sep 29 14:10:15 crc kubenswrapper[4634]: I0929 14:10:15.390957 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"} Sep 29 14:10:15 crc kubenswrapper[4634]: I0929 14:10:15.391273 4634 scope.go:117] "RemoveContainer" containerID="c310be9cfa8ed67485f93d39340fd3b9cbd0be1e3fbae3bd53e3014ebdb22b63" Sep 29 14:10:15 crc kubenswrapper[4634]: I0929 14:10:15.392119 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:10:15 crc kubenswrapper[4634]: E0929 14:10:15.392419 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:10:27 crc kubenswrapper[4634]: I0929 14:10:27.110280 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:10:27 crc kubenswrapper[4634]: E0929 14:10:27.111294 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:10:39 crc kubenswrapper[4634]: I0929 14:10:39.110490 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:10:39 crc kubenswrapper[4634]: E0929 14:10:39.111634 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:10:54 crc kubenswrapper[4634]: I0929 14:10:54.113958 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:10:54 crc kubenswrapper[4634]: E0929 14:10:54.115660 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:11:06 crc kubenswrapper[4634]: I0929 14:11:06.111465 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:11:06 crc kubenswrapper[4634]: E0929 14:11:06.112343 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:11:20 crc kubenswrapper[4634]: I0929 14:11:20.118191 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:11:20 crc kubenswrapper[4634]: E0929 14:11:20.120449 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:11:32 crc kubenswrapper[4634]: I0929 14:11:32.112342 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:11:32 crc kubenswrapper[4634]: E0929 14:11:32.113976 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:11:45 crc kubenswrapper[4634]: I0929 14:11:45.111026 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:11:45 crc kubenswrapper[4634]: E0929 14:11:45.111720 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:11:56 crc kubenswrapper[4634]: I0929 14:11:56.117864 4634 
scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:11:56 crc kubenswrapper[4634]: E0929 14:11:56.119318 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:12:09 crc kubenswrapper[4634]: I0929 14:12:09.110600 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:12:09 crc kubenswrapper[4634]: E0929 14:12:09.113457 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:12:23 crc kubenswrapper[4634]: I0929 14:12:23.112684 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:12:23 crc kubenswrapper[4634]: E0929 14:12:23.113479 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:12:37 crc kubenswrapper[4634]: I0929 14:12:37.054291 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-vz7j6"] Sep 29 14:12:37 crc kubenswrapper[4634]: I0929 14:12:37.070719 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-vz7j6"] Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.032055 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-6j4jw"] Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.042588 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-992nz"] Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.054989 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-992nz"] Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.064022 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-6j4jw"] Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.110580 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:12:38 crc kubenswrapper[4634]: E0929 14:12:38.111179 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" 
podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.123193 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49f0c54b-0171-4bea-954f-83babe332811" path="/var/lib/kubelet/pods/49f0c54b-0171-4bea-954f-83babe332811/volumes" Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.127074 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd2d3c84-abb0-49b0-9d73-19c907c704e7" path="/var/lib/kubelet/pods/bd2d3c84-abb0-49b0-9d73-19c907c704e7/volumes" Sep 29 14:12:38 crc kubenswrapper[4634]: I0929 14:12:38.129414 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f77b2366-66fb-44ca-8b0d-6dabfbe40e25" path="/var/lib/kubelet/pods/f77b2366-66fb-44ca-8b0d-6dabfbe40e25/volumes" Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.050200 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-23d6-account-create-wn7d9"] Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.062875 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-950d-account-create-n7qrh"] Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.073893 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-58a9-account-create-zghfd"] Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.082137 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-23d6-account-create-wn7d9"] Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.091252 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-950d-account-create-n7qrh"] Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.107826 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-58a9-account-create-zghfd"] Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.162718 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ab322f8-39ae-4378-8849-529322d8581b" path="/var/lib/kubelet/pods/0ab322f8-39ae-4378-8849-529322d8581b/volumes" Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.165056 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52ec169b-8f31-4857-9128-20365f472af1" path="/var/lib/kubelet/pods/52ec169b-8f31-4857-9128-20365f472af1/volumes" Sep 29 14:12:48 crc kubenswrapper[4634]: I0929 14:12:48.166614 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="896530d1-39a6-4aaf-b8d8-6cd06077f03d" path="/var/lib/kubelet/pods/896530d1-39a6-4aaf-b8d8-6cd06077f03d/volumes" Sep 29 14:12:50 crc kubenswrapper[4634]: I0929 14:12:50.798449 4634 scope.go:117] "RemoveContainer" containerID="72757820eb641bfc36cca0165ef8113f9cf429633cf3e888413b28f73e938bd3" Sep 29 14:12:50 crc kubenswrapper[4634]: I0929 14:12:50.832231 4634 scope.go:117] "RemoveContainer" containerID="ca674c40459b1f6c1f31b96187b68076a7e643622dc2a5a31b1d9157378dd40c" Sep 29 14:12:50 crc kubenswrapper[4634]: I0929 14:12:50.894283 4634 scope.go:117] "RemoveContainer" containerID="c5f6148230cfbaeb5f7a734e7f04c62a5daa46cddddaf9a16d2c517a6b04c2a3" Sep 29 14:12:50 crc kubenswrapper[4634]: I0929 14:12:50.942485 4634 scope.go:117] "RemoveContainer" containerID="790cb29278d2c5e47e79cd2404fedfb7e78756e0e2f7da3fc8559e85a356b633" Sep 29 14:12:50 crc kubenswrapper[4634]: I0929 14:12:50.973667 4634 scope.go:117] "RemoveContainer" containerID="66251841b61043a3040ed6c7ffd32fbea9a5edcde9ab823412f6787a37c506b5" Sep 29 14:12:51 crc kubenswrapper[4634]: I0929 14:12:51.051953 4634 scope.go:117] 
"RemoveContainer" containerID="d1fec606ae3f0fa6fb22c3e28a28c665404818614f5e8e36917d5c817fa61ead" Sep 29 14:12:51 crc kubenswrapper[4634]: I0929 14:12:51.098672 4634 scope.go:117] "RemoveContainer" containerID="f356dcedf361bfd1962700067b94718a478c4eb5d2e5a13c5f54fa05427a45c0" Sep 29 14:12:51 crc kubenswrapper[4634]: I0929 14:12:51.156345 4634 scope.go:117] "RemoveContainer" containerID="2b4ebe8a759619ad39585392d198aa2465b2c57af848ea24f0824946813034b2" Sep 29 14:12:52 crc kubenswrapper[4634]: I0929 14:12:52.110820 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:12:52 crc kubenswrapper[4634]: E0929 14:12:52.111397 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:13:04 crc kubenswrapper[4634]: I0929 14:13:04.111549 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:13:04 crc kubenswrapper[4634]: E0929 14:13:04.113379 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:13:11 crc kubenswrapper[4634]: I0929 14:13:11.062480 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-jcggm"] Sep 29 14:13:11 crc kubenswrapper[4634]: I0929 14:13:11.078564 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-pkjb5"] Sep 29 14:13:11 crc kubenswrapper[4634]: I0929 14:13:11.089537 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-9gvtl"] Sep 29 14:13:11 crc kubenswrapper[4634]: I0929 14:13:11.104234 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-jcggm"] Sep 29 14:13:11 crc kubenswrapper[4634]: I0929 14:13:11.116475 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-pkjb5"] Sep 29 14:13:11 crc kubenswrapper[4634]: I0929 14:13:11.124330 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-9gvtl"] Sep 29 14:13:12 crc kubenswrapper[4634]: I0929 14:13:12.125300 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8" path="/var/lib/kubelet/pods/4b9df70d-fa14-44a8-b3f7-ef6e1f9a79e8/volumes" Sep 29 14:13:12 crc kubenswrapper[4634]: I0929 14:13:12.127644 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73d87cd9-cadf-4579-b3e2-a2534568d559" path="/var/lib/kubelet/pods/73d87cd9-cadf-4579-b3e2-a2534568d559/volumes" Sep 29 14:13:12 crc kubenswrapper[4634]: I0929 14:13:12.129137 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b539ef0d-f88f-485d-853d-1f340eedb31c" path="/var/lib/kubelet/pods/b539ef0d-f88f-485d-853d-1f340eedb31c/volumes" Sep 29 14:13:18 crc kubenswrapper[4634]: I0929 
14:13:18.110023 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:13:18 crc kubenswrapper[4634]: E0929 14:13:18.110496 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:13:21 crc kubenswrapper[4634]: I0929 14:13:21.036856 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-bwwg8"] Sep 29 14:13:21 crc kubenswrapper[4634]: I0929 14:13:21.046634 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-bwwg8"] Sep 29 14:13:22 crc kubenswrapper[4634]: I0929 14:13:22.122009 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1536d3a4-ce3c-429a-a2ff-0a50e1db22d3" path="/var/lib/kubelet/pods/1536d3a4-ce3c-429a-a2ff-0a50e1db22d3/volumes" Sep 29 14:13:28 crc kubenswrapper[4634]: I0929 14:13:28.085137 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-acca-account-create-nstwb"] Sep 29 14:13:28 crc kubenswrapper[4634]: I0929 14:13:28.136909 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-acca-account-create-nstwb"] Sep 29 14:13:29 crc kubenswrapper[4634]: I0929 14:13:29.027909 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0479-account-create-qsvzr"] Sep 29 14:13:29 crc kubenswrapper[4634]: I0929 14:13:29.038638 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b8d7-account-create-mwmmq"] Sep 29 14:13:29 crc kubenswrapper[4634]: I0929 14:13:29.048147 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b8d7-account-create-mwmmq"] Sep 29 14:13:29 crc kubenswrapper[4634]: I0929 14:13:29.057645 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0479-account-create-qsvzr"] Sep 29 14:13:30 crc kubenswrapper[4634]: I0929 14:13:30.172499 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e21f408-bd65-4b12-8f48-dce2914e71ea" path="/var/lib/kubelet/pods/0e21f408-bd65-4b12-8f48-dce2914e71ea/volumes" Sep 29 14:13:30 crc kubenswrapper[4634]: I0929 14:13:30.174906 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53636ebe-a711-43c9-bad0-192536a09823" path="/var/lib/kubelet/pods/53636ebe-a711-43c9-bad0-192536a09823/volumes" Sep 29 14:13:30 crc kubenswrapper[4634]: I0929 14:13:30.176542 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f608b599-da98-42fc-be9c-54554cf111a3" path="/var/lib/kubelet/pods/f608b599-da98-42fc-be9c-54554cf111a3/volumes" Sep 29 14:13:32 crc kubenswrapper[4634]: I0929 14:13:32.110534 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a" Sep 29 14:13:32 crc kubenswrapper[4634]: E0929 14:13:32.111202 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:13:33 crc kubenswrapper[4634]: I0929 14:13:33.511551 4634 generic.go:334] "Generic (PLEG): container finished" podID="5e3a5bac-db09-4bee-bc1a-a93841ada5ed" containerID="4373aea3aaff6077ba386b8f9e1fd475643acd205c1c702c98b5f4496421f56c" exitCode=0 Sep 29 14:13:33 crc kubenswrapper[4634]: I0929 14:13:33.511610 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" event={"ID":"5e3a5bac-db09-4bee-bc1a-a93841ada5ed","Type":"ContainerDied","Data":"4373aea3aaff6077ba386b8f9e1fd475643acd205c1c702c98b5f4496421f56c"} Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.029176 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.180630 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-bootstrap-combined-ca-bundle\") pod \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.181116 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-inventory\") pod \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.181261 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnl5w\" (UniqueName: \"kubernetes.io/projected/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-kube-api-access-pnl5w\") pod \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.181701 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-ssh-key\") pod \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\" (UID: \"5e3a5bac-db09-4bee-bc1a-a93841ada5ed\") " Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.189575 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "5e3a5bac-db09-4bee-bc1a-a93841ada5ed" (UID: "5e3a5bac-db09-4bee-bc1a-a93841ada5ed"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.206255 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-kube-api-access-pnl5w" (OuterVolumeSpecName: "kube-api-access-pnl5w") pod "5e3a5bac-db09-4bee-bc1a-a93841ada5ed" (UID: "5e3a5bac-db09-4bee-bc1a-a93841ada5ed"). InnerVolumeSpecName "kube-api-access-pnl5w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.216631 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5e3a5bac-db09-4bee-bc1a-a93841ada5ed" (UID: "5e3a5bac-db09-4bee-bc1a-a93841ada5ed"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.225426 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-inventory" (OuterVolumeSpecName: "inventory") pod "5e3a5bac-db09-4bee-bc1a-a93841ada5ed" (UID: "5e3a5bac-db09-4bee-bc1a-a93841ada5ed"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.285221 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.285270 4634 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.285287 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.285297 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnl5w\" (UniqueName: \"kubernetes.io/projected/5e3a5bac-db09-4bee-bc1a-a93841ada5ed-kube-api-access-pnl5w\") on node \"crc\" DevicePath \"\"" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.530223 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" event={"ID":"5e3a5bac-db09-4bee-bc1a-a93841ada5ed","Type":"ContainerDied","Data":"53bfdae0307c82b5c61e2aa52824f4129aa2122a55e310ee3c63628476e62271"} Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.530260 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.530271 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53bfdae0307c82b5c61e2aa52824f4129aa2122a55e310ee3c63628476e62271" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.644838 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2"] Sep 29 14:13:35 crc kubenswrapper[4634]: E0929 14:13:35.645240 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e3a5bac-db09-4bee-bc1a-a93841ada5ed" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.645258 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e3a5bac-db09-4bee-bc1a-a93841ada5ed" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.645499 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e3a5bac-db09-4bee-bc1a-a93841ada5ed" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.646142 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.649338 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.649925 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.650158 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.650283 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.666562 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2"] Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.693140 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.693359 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqlnz\" (UniqueName: \"kubernetes.io/projected/268175fe-c76e-4032-8027-db49b1355ec7-kube-api-access-gqlnz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.693452 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.794727 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.795100 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqlnz\" (UniqueName: \"kubernetes.io/projected/268175fe-c76e-4032-8027-db49b1355ec7-kube-api-access-gqlnz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.795241 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.799311 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.800783 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.814175 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqlnz\" (UniqueName: \"kubernetes.io/projected/268175fe-c76e-4032-8027-db49b1355ec7-kube-api-access-gqlnz\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" Sep 29 14:13:35 crc kubenswrapper[4634]: I0929 14:13:35.968411 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2"
Sep 29 14:13:36 crc kubenswrapper[4634]: I0929 14:13:36.525320 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2"]
Sep 29 14:13:36 crc kubenswrapper[4634]: I0929 14:13:36.536413 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 14:13:37 crc kubenswrapper[4634]: I0929 14:13:37.549002 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" event={"ID":"268175fe-c76e-4032-8027-db49b1355ec7","Type":"ContainerStarted","Data":"e07e3874e528d2d7ec6d90366e1554de6d45217a4e4940498d7c7487c6f231c9"}
Sep 29 14:13:37 crc kubenswrapper[4634]: I0929 14:13:37.549248 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" event={"ID":"268175fe-c76e-4032-8027-db49b1355ec7","Type":"ContainerStarted","Data":"f11b4fc0490484a9fdef35b05e4bca416f62d7d72ef7fce84494f41f38a1ec40"}
Sep 29 14:13:37 crc kubenswrapper[4634]: I0929 14:13:37.576246 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" podStartSLOduration=2.403561651 podStartE2EDuration="2.57622471s" podCreationTimestamp="2025-09-29 14:13:35 +0000 UTC" firstStartedPulling="2025-09-29 14:13:36.535906854 +0000 UTC m=+1747.104634613" lastFinishedPulling="2025-09-29 14:13:36.708569913 +0000 UTC m=+1747.277297672" observedRunningTime="2025-09-29 14:13:37.563279171 +0000 UTC m=+1748.132006940" watchObservedRunningTime="2025-09-29 14:13:37.57622471 +0000 UTC m=+1748.144952469"
Sep 29 14:13:47 crc kubenswrapper[4634]: I0929 14:13:47.110633 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:13:47 crc kubenswrapper[4634]: E0929 14:13:47.111317 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:13:48 crc kubenswrapper[4634]: I0929 14:13:48.032790 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-25zsb"]
Sep 29 14:13:48 crc kubenswrapper[4634]: I0929 14:13:48.041233 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-25zsb"]
Sep 29 14:13:48 crc kubenswrapper[4634]: I0929 14:13:48.153010 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43cee9c9-8b49-4b42-a525-ccd9ab1a9730" path="/var/lib/kubelet/pods/43cee9c9-8b49-4b42-a525-ccd9ab1a9730/volumes"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.361712 4634 scope.go:117] "RemoveContainer" containerID="93e8e4d824f24622ca6409aafd5fb701c7187189c086884a1328c10d2439f3f5"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.399780 4634 scope.go:117] "RemoveContainer" containerID="6ac1ab9378ea3d79c5cd67906f9795bfb68650cbdfb5b1237559e60fbba1ee9e"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.450509 4634 scope.go:117] "RemoveContainer" containerID="0ab2e3ac4ec4ec2bb0e10bd7f8c433d0a587dc58fb1b8544e95a404a5a38cc04"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.523173 4634 scope.go:117] "RemoveContainer" containerID="7ebb0c1c2f2a2aecd1e5964b99c3d5fa9b72bc819bec1584c801135bc771f7fb"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.556017 4634 scope.go:117] "RemoveContainer" containerID="adc5419551dd448ee50d381c71870cd1922e3728a99bdf7930bb6220c24d17ec"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.607480 4634 scope.go:117] "RemoveContainer" containerID="b1c71828c67b5f19e9593d4e690bc3a64235594808963e2d181d6ba42a069b08"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.656649 4634 scope.go:117] "RemoveContainer" containerID="d730ead07dad9c63ea9062c8d087b4d20d4fb00b74390ea490db7d292159f9e4"
Sep 29 14:13:51 crc kubenswrapper[4634]: I0929 14:13:51.676705 4634 scope.go:117] "RemoveContainer" containerID="0cd7ea42be56e61373ba79946e83c00c514c6213ecfc42d0f1beac026e1f0818"
Sep 29 14:13:59 crc kubenswrapper[4634]: I0929 14:13:59.110686 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:13:59 crc kubenswrapper[4634]: E0929 14:13:59.111453 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:14:11 crc kubenswrapper[4634]: I0929 14:14:11.046408 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-rp2lh"]
Sep 29 14:14:11 crc kubenswrapper[4634]: I0929 14:14:11.055939 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-rp2lh"]
Sep 29 14:14:12 crc kubenswrapper[4634]: I0929 14:14:12.110817 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:14:12 crc kubenswrapper[4634]: E0929 14:14:12.111322 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:14:12 crc kubenswrapper[4634]: I0929 14:14:12.125776 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dfb65bf-8466-4184-a169-755fbb7d65d9" path="/var/lib/kubelet/pods/1dfb65bf-8466-4184-a169-755fbb7d65d9/volumes"
Sep 29 14:14:25 crc kubenswrapper[4634]: I0929 14:14:25.110434 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:14:25 crc kubenswrapper[4634]: E0929 14:14:25.111219 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:14:34 crc kubenswrapper[4634]: I0929 14:14:34.077193 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-bqjnz"]
Sep 29 14:14:34 crc kubenswrapper[4634]: I0929 14:14:34.090948 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-bqjnz"]
Sep 29 14:14:34 crc kubenswrapper[4634]: I0929 14:14:34.157558 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aac1358d-d39f-4732-97fa-cc8947c81bdb" path="/var/lib/kubelet/pods/aac1358d-d39f-4732-97fa-cc8947c81bdb/volumes"
Sep 29 14:14:36 crc kubenswrapper[4634]: I0929 14:14:36.110753 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:14:36 crc kubenswrapper[4634]: E0929 14:14:36.111669 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:14:38 crc kubenswrapper[4634]: I0929 14:14:38.028050 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-577dn"]
Sep 29 14:14:38 crc kubenswrapper[4634]: I0929 14:14:38.037631 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-6927h"]
Sep 29 14:14:38 crc kubenswrapper[4634]: I0929 14:14:38.047198 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-6927h"]
Sep 29 14:14:38 crc kubenswrapper[4634]: I0929 14:14:38.055239 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-577dn"]
Sep 29 14:14:38 crc kubenswrapper[4634]: I0929 14:14:38.128875 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c1084d0-17b1-40a1-b57e-11e41ad8db3b" path="/var/lib/kubelet/pods/1c1084d0-17b1-40a1-b57e-11e41ad8db3b/volumes"
Sep 29 14:14:38 crc kubenswrapper[4634]: I0929 14:14:38.129831 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5cbaa37-b66c-4549-9ccd-e9ba5771038a" path="/var/lib/kubelet/pods/a5cbaa37-b66c-4549-9ccd-e9ba5771038a/volumes"
Sep 29 14:14:50 crc kubenswrapper[4634]: I0929 14:14:50.116452 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:14:50 crc kubenswrapper[4634]: E0929 14:14:50.117277 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:14:51 crc kubenswrapper[4634]: I0929 14:14:51.881282 4634 scope.go:117] "RemoveContainer" containerID="99221a0eaa7e0a10c48f6c4f5cada96e65e5b31dbcfab1eb29288d0f72b7ea56"
Sep 29 14:14:51 crc kubenswrapper[4634]: I0929 14:14:51.915701 4634 scope.go:117] "RemoveContainer" containerID="933b8bbb36dfb007f66544045aa0662ae838762226822174e515759f482fd247"
Sep 29 14:14:51 crc kubenswrapper[4634]: I0929 14:14:51.970403 4634 scope.go:117] "RemoveContainer" containerID="bb5673135f867de6d57893626f372f653b907692d0d78c44e581635e7e400137"
Sep 29 14:14:52 crc kubenswrapper[4634]: I0929 14:14:52.014346 4634 scope.go:117] "RemoveContainer" containerID="5e2afff12bdd152a5f432cbdd8f291fe2e553a524d5bd99a629b9c68ad24d2cd"
Sep 29 14:14:58 crc kubenswrapper[4634]: I0929 14:14:58.037256 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-dsvdn"]
Sep 29 14:14:58 crc kubenswrapper[4634]: I0929 14:14:58.043326 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-dsvdn"]
Sep 29 14:14:58 crc kubenswrapper[4634]: I0929 14:14:58.138112 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="519a22f3-5513-430a-bd2b-6670ece06c2d" path="/var/lib/kubelet/pods/519a22f3-5513-430a-bd2b-6670ece06c2d/volumes"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.155100 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"]
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.156797 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.162205 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.172240 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.190368 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"]
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.214508 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-config-volume\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.214863 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42czc\" (UniqueName: \"kubernetes.io/projected/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-kube-api-access-42czc\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.214975 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-secret-volume\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.316909 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42czc\" (UniqueName: \"kubernetes.io/projected/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-kube-api-access-42czc\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.316976 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-secret-volume\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.317079 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-config-volume\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.318373 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-config-volume\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.337968 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-secret-volume\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.341957 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42czc\" (UniqueName: \"kubernetes.io/projected/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-kube-api-access-42czc\") pod \"collect-profiles-29319255-xvgqk\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.478830 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:00 crc kubenswrapper[4634]: I0929 14:15:00.983885 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"]
Sep 29 14:15:01 crc kubenswrapper[4634]: I0929 14:15:01.508708 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk" event={"ID":"06dcdd4d-8ef9-43c8-924e-87fe5c67d329","Type":"ContainerStarted","Data":"74154ed93de8288ee30f5d88849bf40dd98c32004779b9939a6baec5c775ed8a"}
Sep 29 14:15:01 crc kubenswrapper[4634]: I0929 14:15:01.510231 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk" event={"ID":"06dcdd4d-8ef9-43c8-924e-87fe5c67d329","Type":"ContainerStarted","Data":"66623bd61fe361e2a28d701d862e3018c045c8bbe5c7a32d56fbee7186de1189"}
Sep 29 14:15:01 crc kubenswrapper[4634]: I0929 14:15:01.525824 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk" podStartSLOduration=1.5258043350000001 podStartE2EDuration="1.525804335s" podCreationTimestamp="2025-09-29 14:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:15:01.52489272 +0000 UTC m=+1832.093620489" watchObservedRunningTime="2025-09-29 14:15:01.525804335 +0000 UTC m=+1832.094532094"
Sep 29 14:15:02 crc kubenswrapper[4634]: I0929 14:15:02.522393 4634 generic.go:334] "Generic (PLEG): container finished" podID="06dcdd4d-8ef9-43c8-924e-87fe5c67d329" containerID="74154ed93de8288ee30f5d88849bf40dd98c32004779b9939a6baec5c775ed8a" exitCode=0
Sep 29 14:15:02 crc kubenswrapper[4634]: I0929 14:15:02.522448 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk" event={"ID":"06dcdd4d-8ef9-43c8-924e-87fe5c67d329","Type":"ContainerDied","Data":"74154ed93de8288ee30f5d88849bf40dd98c32004779b9939a6baec5c775ed8a"}
Sep 29 14:15:03 crc kubenswrapper[4634]: I0929 14:15:03.895098 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.045524 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-config-volume\") pod \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") "
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.045749 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42czc\" (UniqueName: \"kubernetes.io/projected/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-kube-api-access-42czc\") pod \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") "
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.046548 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-config-volume" (OuterVolumeSpecName: "config-volume") pod "06dcdd4d-8ef9-43c8-924e-87fe5c67d329" (UID: "06dcdd4d-8ef9-43c8-924e-87fe5c67d329"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.046821 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-secret-volume\") pod \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\" (UID: \"06dcdd4d-8ef9-43c8-924e-87fe5c67d329\") "
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.047309 4634 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.065770 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "06dcdd4d-8ef9-43c8-924e-87fe5c67d329" (UID: "06dcdd4d-8ef9-43c8-924e-87fe5c67d329"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.066242 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-kube-api-access-42czc" (OuterVolumeSpecName: "kube-api-access-42czc") pod "06dcdd4d-8ef9-43c8-924e-87fe5c67d329" (UID: "06dcdd4d-8ef9-43c8-924e-87fe5c67d329"). InnerVolumeSpecName "kube-api-access-42czc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.149875 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42czc\" (UniqueName: \"kubernetes.io/projected/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-kube-api-access-42czc\") on node \"crc\" DevicePath \"\""
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.149988 4634 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/06dcdd4d-8ef9-43c8-924e-87fe5c67d329-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.539564 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk" event={"ID":"06dcdd4d-8ef9-43c8-924e-87fe5c67d329","Type":"ContainerDied","Data":"66623bd61fe361e2a28d701d862e3018c045c8bbe5c7a32d56fbee7186de1189"}
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.539607 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66623bd61fe361e2a28d701d862e3018c045c8bbe5c7a32d56fbee7186de1189"
Sep 29 14:15:04 crc kubenswrapper[4634]: I0929 14:15:04.539609 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"
Sep 29 14:15:05 crc kubenswrapper[4634]: I0929 14:15:05.110552 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:15:05 crc kubenswrapper[4634]: E0929 14:15:05.111175 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:15:16 crc kubenswrapper[4634]: I0929 14:15:16.110319 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:15:16 crc kubenswrapper[4634]: I0929 14:15:16.643196 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"4b3f11a3b532b31a5f39dc3954037054fe5968d53a04cc1b756a9b0a70e96890"}
Sep 29 14:15:21 crc kubenswrapper[4634]: I0929 14:15:21.032154 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-jnxdv"]
Sep 29 14:15:21 crc kubenswrapper[4634]: I0929 14:15:21.043217 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-jnxdv"]
Sep 29 14:15:21 crc kubenswrapper[4634]: I0929 14:15:21.059186 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-lbqsf"]
Sep 29 14:15:21 crc kubenswrapper[4634]: I0929 14:15:21.071495 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-jps78"]
Sep 29 14:15:21 crc kubenswrapper[4634]: I0929 14:15:21.079503 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-lbqsf"]
Sep 29 14:15:21 crc kubenswrapper[4634]: I0929 14:15:21.085834 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-jps78"]
Sep 29 14:15:22 crc kubenswrapper[4634]: I0929 14:15:22.139850 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2280fa9-785d-467b-a50c-010d55023f64" path="/var/lib/kubelet/pods/e2280fa9-785d-467b-a50c-010d55023f64/volumes"
Sep 29 14:15:22 crc kubenswrapper[4634]: I0929 14:15:22.140985 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e673e0f3-02fe-40fb-8fc5-8ab681f26068" path="/var/lib/kubelet/pods/e673e0f3-02fe-40fb-8fc5-8ab681f26068/volumes"
Sep 29 14:15:22 crc kubenswrapper[4634]: I0929 14:15:22.141580 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eab330ba-5ee5-43b8-b7a5-8c229c3447c3" path="/var/lib/kubelet/pods/eab330ba-5ee5-43b8-b7a5-8c229c3447c3/volumes"
Sep 29 14:15:35 crc kubenswrapper[4634]: I0929 14:15:35.821144 4634 generic.go:334] "Generic (PLEG): container finished" podID="268175fe-c76e-4032-8027-db49b1355ec7" containerID="e07e3874e528d2d7ec6d90366e1554de6d45217a4e4940498d7c7487c6f231c9" exitCode=0
Sep 29 14:15:35 crc kubenswrapper[4634]: I0929 14:15:35.821206 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" event={"ID":"268175fe-c76e-4032-8027-db49b1355ec7","Type":"ContainerDied","Data":"e07e3874e528d2d7ec6d90366e1554de6d45217a4e4940498d7c7487c6f231c9"}
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.264739 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.360224 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqlnz\" (UniqueName: \"kubernetes.io/projected/268175fe-c76e-4032-8027-db49b1355ec7-kube-api-access-gqlnz\") pod \"268175fe-c76e-4032-8027-db49b1355ec7\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") "
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.360409 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-ssh-key\") pod \"268175fe-c76e-4032-8027-db49b1355ec7\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") "
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.360495 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-inventory\") pod \"268175fe-c76e-4032-8027-db49b1355ec7\" (UID: \"268175fe-c76e-4032-8027-db49b1355ec7\") "
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.368520 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/268175fe-c76e-4032-8027-db49b1355ec7-kube-api-access-gqlnz" (OuterVolumeSpecName: "kube-api-access-gqlnz") pod "268175fe-c76e-4032-8027-db49b1355ec7" (UID: "268175fe-c76e-4032-8027-db49b1355ec7"). InnerVolumeSpecName "kube-api-access-gqlnz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.399117 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-inventory" (OuterVolumeSpecName: "inventory") pod "268175fe-c76e-4032-8027-db49b1355ec7" (UID: "268175fe-c76e-4032-8027-db49b1355ec7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.408069 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "268175fe-c76e-4032-8027-db49b1355ec7" (UID: "268175fe-c76e-4032-8027-db49b1355ec7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.464090 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqlnz\" (UniqueName: \"kubernetes.io/projected/268175fe-c76e-4032-8027-db49b1355ec7-kube-api-access-gqlnz\") on node \"crc\" DevicePath \"\""
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.464275 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.464379 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/268175fe-c76e-4032-8027-db49b1355ec7-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.839525 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2" event={"ID":"268175fe-c76e-4032-8027-db49b1355ec7","Type":"ContainerDied","Data":"f11b4fc0490484a9fdef35b05e4bca416f62d7d72ef7fce84494f41f38a1ec40"}
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.839570 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f11b4fc0490484a9fdef35b05e4bca416f62d7d72ef7fce84494f41f38a1ec40"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.839602 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.941235 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"]
Sep 29 14:15:37 crc kubenswrapper[4634]: E0929 14:15:37.942888 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268175fe-c76e-4032-8027-db49b1355ec7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.942983 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="268175fe-c76e-4032-8027-db49b1355ec7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:15:37 crc kubenswrapper[4634]: E0929 14:15:37.943105 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06dcdd4d-8ef9-43c8-924e-87fe5c67d329" containerName="collect-profiles"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.943180 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="06dcdd4d-8ef9-43c8-924e-87fe5c67d329" containerName="collect-profiles"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.943515 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="268175fe-c76e-4032-8027-db49b1355ec7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.943616 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="06dcdd4d-8ef9-43c8-924e-87fe5c67d329" containerName="collect-profiles"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.944406 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.951119 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.951170 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.951259 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.951371 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.962196 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"]
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.986071 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.986153 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl9mw\" (UniqueName: \"kubernetes.io/projected/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-kube-api-access-kl9mw\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:37 crc kubenswrapper[4634]: I0929 14:15:37.986322 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.088481 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.088528 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl9mw\" (UniqueName: \"kubernetes.io/projected/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-kube-api-access-kl9mw\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.088633 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.093931 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.094257 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.109672 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl9mw\" (UniqueName: \"kubernetes.io/projected/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-kube-api-access-kl9mw\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.261730 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:15:38 crc kubenswrapper[4634]: I0929 14:15:38.836872 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"]
Sep 29 14:15:39 crc kubenswrapper[4634]: I0929 14:15:39.866538 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc" event={"ID":"d8fe0f99-6eea-49ca-bf34-fd88555c84ec","Type":"ContainerStarted","Data":"cc84aecdb5394bc63c2a46701034d8b557ccb7117f2b4270b5ba97eb1013e304"}
Sep 29 14:15:40 crc kubenswrapper[4634]: I0929 14:15:40.881346 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc" event={"ID":"d8fe0f99-6eea-49ca-bf34-fd88555c84ec","Type":"ContainerStarted","Data":"9e0ae788158f8a42e2e8a229b1f462854a75da32a917f0f2e72a6754b06a1121"}
Sep 29 14:15:40 crc kubenswrapper[4634]: I0929 14:15:40.904923 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc" podStartSLOduration=2.944361117 podStartE2EDuration="3.90490559s" podCreationTimestamp="2025-09-29 14:15:37 +0000 UTC" firstStartedPulling="2025-09-29 14:15:38.858858187 +0000 UTC m=+1869.427585936" lastFinishedPulling="2025-09-29 14:15:39.81940266 +0000 UTC m=+1870.388130409" observedRunningTime="2025-09-29 14:15:40.89410002 +0000 UTC m=+1871.462827759" watchObservedRunningTime="2025-09-29 14:15:40.90490559 +0000 UTC m=+1871.473633339"
Sep 29 14:15:51 crc kubenswrapper[4634]: I0929 14:15:51.038698 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-a71a-account-create-2d57l"]
Sep 29 14:15:51 crc kubenswrapper[4634]: I0929 14:15:51.050150 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-a71a-account-create-2d57l"]
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.028838 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-f016-account-create-srmqs"]
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.038074 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-d98c-account-create-2f7ww"]
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.048016 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-d98c-account-create-2f7ww"]
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.056993 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-f016-account-create-srmqs"]
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.128416 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e65b300-4b51-45f3-8a31-9a95755cb81a" path="/var/lib/kubelet/pods/2e65b300-4b51-45f3-8a31-9a95755cb81a/volumes"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.129122 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="575fd438-a63c-4fe1-8bc7-e8d293176ec0" path="/var/lib/kubelet/pods/575fd438-a63c-4fe1-8bc7-e8d293176ec0/volumes"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.129751 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d5b014b-6225-45cf-afcd-4263a6347f2d" path="/var/lib/kubelet/pods/5d5b014b-6225-45cf-afcd-4263a6347f2d/volumes"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.137601 4634 scope.go:117] "RemoveContainer" containerID="c229fa6418aa7fb8d33facb2cf9b9e06e03ed5f62b0b6153d961c84ad1017816"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.176153 4634 scope.go:117] "RemoveContainer" containerID="41f071213ddd2359d56c2bcec7b71b96726b49ba592d01abba927ba746c1f3d6"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.214232 4634 scope.go:117] "RemoveContainer" containerID="b85849bb737513a06fb85cdc3cdd5d8819d2622400db7de04265d258e1723405"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.270558 4634 scope.go:117] "RemoveContainer" containerID="8b93dc62bae995a6ba3c042ce8aeef8879e8ee6a80ac5b251633cadfc95d412e"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.320667 4634 scope.go:117] "RemoveContainer" containerID="d7559e56cdd92910bba890d7d2fa3853a747697534885ed0b940e3a95bdf738b"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.368317 4634 scope.go:117] "RemoveContainer" containerID="95561b3d9bf0df8cd81d8da175a9bf638c826d122eca76f548f5467310daf6e4"
Sep 29 14:15:52 crc kubenswrapper[4634]: I0929 14:15:52.411893 4634 scope.go:117] "RemoveContainer" containerID="9829540ad9feff8aacb633083bd2ae8e56f580e4e92ce0f17976cbe16c053914"
Sep 29 14:16:27 crc kubenswrapper[4634]: I0929 14:16:27.076546 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjgff"]
Sep 29 14:16:27 crc kubenswrapper[4634]: I0929 14:16:27.091063 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-gjgff"]
Sep 29 14:16:28 crc kubenswrapper[4634]: I0929 14:16:28.129887 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dec8206e-efde-472c-9342-1ac2e0913508" path="/var/lib/kubelet/pods/dec8206e-efde-472c-9342-1ac2e0913508/volumes"
Sep 29 14:16:52 crc kubenswrapper[4634]: I0929 14:16:52.565005 4634 scope.go:117] "RemoveContainer" containerID="30f71aa55e79d407cb936964a158516f3fa28f5592ee3f678476206d3325268e"
Sep 29 14:16:57 crc kubenswrapper[4634]: I0929 14:16:57.052740 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-drmmw"]
Sep 29 14:16:57 crc kubenswrapper[4634]: I0929 14:16:57.065733 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-drmmw"]
Sep 29 14:16:58 crc kubenswrapper[4634]: I0929 14:16:58.036928 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-x8ss8"]
Sep 29 14:16:58 crc kubenswrapper[4634]: I0929 14:16:58.047382 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-x8ss8"]
Sep 29 14:16:58 crc kubenswrapper[4634]: I0929 14:16:58.127146 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a923015-986d-4efd-9f6d-dcae7f51d7a2" path="/var/lib/kubelet/pods/0a923015-986d-4efd-9f6d-dcae7f51d7a2/volumes"
Sep 29 14:16:58 crc kubenswrapper[4634]: I0929 14:16:58.131910 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25131260-fc44-4b6a-beb9-98cc9ce0f27d" path="/var/lib/kubelet/pods/25131260-fc44-4b6a-beb9-98cc9ce0f27d/volumes"
Sep 29 14:17:00 crc kubenswrapper[4634]: I0929 14:17:00.557737 4634 generic.go:334] "Generic (PLEG): container finished" podID="d8fe0f99-6eea-49ca-bf34-fd88555c84ec" containerID="9e0ae788158f8a42e2e8a229b1f462854a75da32a917f0f2e72a6754b06a1121" exitCode=0
Sep 29 14:17:00 crc kubenswrapper[4634]: I0929 14:17:00.557822 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc" event={"ID":"d8fe0f99-6eea-49ca-bf34-fd88555c84ec","Type":"ContainerDied","Data":"9e0ae788158f8a42e2e8a229b1f462854a75da32a917f0f2e72a6754b06a1121"}
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.228593 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.336552 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-ssh-key\") pod \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") "
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.336701 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kl9mw\" (UniqueName: \"kubernetes.io/projected/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-kube-api-access-kl9mw\") pod \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") "
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.336826 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-inventory\") pod \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\" (UID: \"d8fe0f99-6eea-49ca-bf34-fd88555c84ec\") "
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.341933 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-kube-api-access-kl9mw" (OuterVolumeSpecName: "kube-api-access-kl9mw") pod "d8fe0f99-6eea-49ca-bf34-fd88555c84ec" (UID: "d8fe0f99-6eea-49ca-bf34-fd88555c84ec"). InnerVolumeSpecName "kube-api-access-kl9mw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.367215 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-inventory" (OuterVolumeSpecName: "inventory") pod "d8fe0f99-6eea-49ca-bf34-fd88555c84ec" (UID: "d8fe0f99-6eea-49ca-bf34-fd88555c84ec"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.368672 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d8fe0f99-6eea-49ca-bf34-fd88555c84ec" (UID: "d8fe0f99-6eea-49ca-bf34-fd88555c84ec"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.438624 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.438753 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.438821 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kl9mw\" (UniqueName: \"kubernetes.io/projected/d8fe0f99-6eea-49ca-bf34-fd88555c84ec-kube-api-access-kl9mw\") on node \"crc\" DevicePath \"\""
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.576275 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc" event={"ID":"d8fe0f99-6eea-49ca-bf34-fd88555c84ec","Type":"ContainerDied","Data":"cc84aecdb5394bc63c2a46701034d8b557ccb7117f2b4270b5ba97eb1013e304"}
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.576316 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc84aecdb5394bc63c2a46701034d8b557ccb7117f2b4270b5ba97eb1013e304"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.576371 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.675687 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"]
Sep 29 14:17:02 crc kubenswrapper[4634]: E0929 14:17:02.676192 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8fe0f99-6eea-49ca-bf34-fd88555c84ec" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.676217 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8fe0f99-6eea-49ca-bf34-fd88555c84ec" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.676475 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8fe0f99-6eea-49ca-bf34-fd88555c84ec" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.677241 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.679353 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.679479 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.679621 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.679745 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.689282 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"]
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.744270 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.744391 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.744418 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2ppr\" (UniqueName: \"kubernetes.io/projected/572bf9da-bb03-48be-b902-48ea1755346d-kube-api-access-h2ppr\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.846935 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.846987 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2ppr\" (UniqueName: \"kubernetes.io/projected/572bf9da-bb03-48be-b902-48ea1755346d-kube-api-access-h2ppr\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.847173 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.852699 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.853500 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.866038 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2ppr\" (UniqueName: \"kubernetes.io/projected/572bf9da-bb03-48be-b902-48ea1755346d-kube-api-access-h2ppr\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-thjn4\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:02 crc kubenswrapper[4634]: I0929 14:17:02.995959 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:03 crc kubenswrapper[4634]: I0929 14:17:03.522379 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"]
Sep 29 14:17:03 crc kubenswrapper[4634]: I0929 14:17:03.585627 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4" event={"ID":"572bf9da-bb03-48be-b902-48ea1755346d","Type":"ContainerStarted","Data":"98ad1bc8022cffc5f9dbc11749a7a585a80d11e569472da342c3d280bcf1d172"}
Sep 29 14:17:04 crc kubenswrapper[4634]: I0929 14:17:04.605284 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4" event={"ID":"572bf9da-bb03-48be-b902-48ea1755346d","Type":"ContainerStarted","Data":"71095577d5d1dd6d7f054c6860eee98769b41122f52181c770617d8d050a74df"}
Sep 29 14:17:04 crc kubenswrapper[4634]: I0929 14:17:04.626247 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4" podStartSLOduration=2.328806289 podStartE2EDuration="2.626217551s" podCreationTimestamp="2025-09-29 14:17:02 +0000 UTC" firstStartedPulling="2025-09-29 14:17:03.530370834 +0000 UTC m=+1954.099098583" lastFinishedPulling="2025-09-29 14:17:03.827782106 +0000 UTC m=+1954.396509845" observedRunningTime="2025-09-29 14:17:04.619681963 +0000 UTC m=+1955.188409762" watchObservedRunningTime="2025-09-29 14:17:04.626217551 +0000 UTC m=+1955.194945380"
Sep 29 14:17:09 crc kubenswrapper[4634]: I0929 14:17:09.658282 4634 generic.go:334] "Generic (PLEG): container finished" podID="572bf9da-bb03-48be-b902-48ea1755346d" containerID="71095577d5d1dd6d7f054c6860eee98769b41122f52181c770617d8d050a74df" exitCode=0
Sep 29 14:17:09 crc kubenswrapper[4634]: I0929 14:17:09.662012 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4" event={"ID":"572bf9da-bb03-48be-b902-48ea1755346d","Type":"ContainerDied","Data":"71095577d5d1dd6d7f054c6860eee98769b41122f52181c770617d8d050a74df"}
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.133520 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.209060 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2ppr\" (UniqueName: \"kubernetes.io/projected/572bf9da-bb03-48be-b902-48ea1755346d-kube-api-access-h2ppr\") pod \"572bf9da-bb03-48be-b902-48ea1755346d\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") "
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.209213 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-inventory\") pod \"572bf9da-bb03-48be-b902-48ea1755346d\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") "
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.209318 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-ssh-key\") pod \"572bf9da-bb03-48be-b902-48ea1755346d\" (UID: \"572bf9da-bb03-48be-b902-48ea1755346d\") "
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.226498 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/572bf9da-bb03-48be-b902-48ea1755346d-kube-api-access-h2ppr" (OuterVolumeSpecName: "kube-api-access-h2ppr") pod "572bf9da-bb03-48be-b902-48ea1755346d" (UID: "572bf9da-bb03-48be-b902-48ea1755346d"). InnerVolumeSpecName "kube-api-access-h2ppr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.240693 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "572bf9da-bb03-48be-b902-48ea1755346d" (UID: "572bf9da-bb03-48be-b902-48ea1755346d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.241031 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-inventory" (OuterVolumeSpecName: "inventory") pod "572bf9da-bb03-48be-b902-48ea1755346d" (UID: "572bf9da-bb03-48be-b902-48ea1755346d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.314125 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2ppr\" (UniqueName: \"kubernetes.io/projected/572bf9da-bb03-48be-b902-48ea1755346d-kube-api-access-h2ppr\") on node \"crc\" DevicePath \"\""
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.314186 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.314202 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/572bf9da-bb03-48be-b902-48ea1755346d-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.678881 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4" event={"ID":"572bf9da-bb03-48be-b902-48ea1755346d","Type":"ContainerDied","Data":"98ad1bc8022cffc5f9dbc11749a7a585a80d11e569472da342c3d280bcf1d172"}
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.678918 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98ad1bc8022cffc5f9dbc11749a7a585a80d11e569472da342c3d280bcf1d172"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.678963 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-thjn4"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.744367 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"]
Sep 29 14:17:11 crc kubenswrapper[4634]: E0929 14:17:11.744918 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="572bf9da-bb03-48be-b902-48ea1755346d" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.744983 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="572bf9da-bb03-48be-b902-48ea1755346d" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.745239 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="572bf9da-bb03-48be-b902-48ea1755346d" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.746095 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.748280 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.748584 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.748697 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.750902 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.754873 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"]
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.819953 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-562xs\" (UniqueName: \"kubernetes.io/projected/ca087dcb-6346-46ed-9750-b5548355305a-kube-api-access-562xs\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.820016 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.820109 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.921461 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-562xs\" (UniqueName: \"kubernetes.io/projected/ca087dcb-6346-46ed-9750-b5548355305a-kube-api-access-562xs\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.921529 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.921614 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.932982 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.933846 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:11 crc kubenswrapper[4634]: I0929 14:17:11.936327 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-562xs\" (UniqueName: \"kubernetes.io/projected/ca087dcb-6346-46ed-9750-b5548355305a-kube-api-access-562xs\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-zs264\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:12 crc kubenswrapper[4634]: I0929 14:17:12.067177 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"
Sep 29 14:17:12 crc kubenswrapper[4634]: I0929 14:17:12.410937 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264"]
Sep 29 14:17:12 crc kubenswrapper[4634]: I0929 14:17:12.704452 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264" event={"ID":"ca087dcb-6346-46ed-9750-b5548355305a","Type":"ContainerStarted","Data":"ba084854649e0ffff6ef3400627efa4287b940afbd5838bcb1e26ba640210c1a"}
Sep 29 14:17:13 crc kubenswrapper[4634]: I0929 14:17:13.716994 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264" event={"ID":"ca087dcb-6346-46ed-9750-b5548355305a","Type":"ContainerStarted","Data":"bcef42ea3c827167b2c7aa3e7f7822d8cfb3d7ac5e5968c3fdfcb703879c7a59"}
Sep 29 14:17:13 crc kubenswrapper[4634]: I0929 14:17:13.749476 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264" podStartSLOduration=2.215857776 podStartE2EDuration="2.749458926s" podCreationTimestamp="2025-09-29 14:17:11 +0000 UTC" firstStartedPulling="2025-09-29 14:17:12.40253474 +0000 UTC m=+1962.971262489" lastFinishedPulling="2025-09-29 14:17:12.93613589 +0000 UTC m=+1963.504863639" observedRunningTime="2025-09-29 14:17:13.743268768 +0000 UTC m=+1964.311996527" watchObservedRunningTime="2025-09-29 14:17:13.749458926 +0000 UTC m=+1964.318186675"
Sep 29 14:17:42 crc kubenswrapper[4634]: I0929 14:17:42.055633 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvmgw"]
Sep 29 14:17:42 crc kubenswrapper[4634]: I0929 14:17:42.062741 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-dvmgw"]
Sep 29 14:17:42 crc kubenswrapper[4634]: I0929 14:17:42.119909 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e00a014d-19be-46d4-91e0-0b9a34160195" path="/var/lib/kubelet/pods/e00a014d-19be-46d4-91e0-0b9a34160195/volumes"
Sep 29 14:17:44 crc kubenswrapper[4634]: I0929 14:17:44.396449 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:17:44 crc kubenswrapper[4634]: I0929 14:17:44.396818 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:17:52 crc kubenswrapper[4634]: I0929 14:17:52.658890 4634 scope.go:117] "RemoveContainer" containerID="ba446259fee40f5bfbfe0a21049dd3f8566c7d82243fd2bd3137d51fcaf4b104"
Sep 29 14:17:52 crc kubenswrapper[4634]: I0929 14:17:52.717960 4634 scope.go:117] "RemoveContainer" containerID="e51de5759532a1462c272d8c9a3f4b7a790fc30ef4279d18b24024b7218795e8"
Sep 29 14:17:52 crc kubenswrapper[4634]: I0929 14:17:52.783183 4634 scope.go:117] "RemoveContainer" containerID="d1c4161a80399153cfcef26a83f09da2660c377b67fa74973462031b86b568e1"
Sep 29 14:17:57 crc kubenswrapper[4634]: I0929 14:17:57.155596 4634 generic.go:334] "Generic (PLEG): container finished" podID="ca087dcb-6346-46ed-9750-b5548355305a" containerID="bcef42ea3c827167b2c7aa3e7f7822d8cfb3d7ac5e5968c3fdfcb703879c7a59" exitCode=0
Sep 29 14:17:57 crc kubenswrapper[4634]: I0929 14:17:57.155805 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264" event={"ID":"ca087dcb-6346-46ed-9750-b5548355305a","Type":"ContainerDied","Data":"bcef42ea3c827167b2c7aa3e7f7822d8cfb3d7ac5e5968c3fdfcb703879c7a59"}
Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.624689 4634 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264" Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.785151 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-ssh-key\") pod \"ca087dcb-6346-46ed-9750-b5548355305a\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.785203 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-inventory\") pod \"ca087dcb-6346-46ed-9750-b5548355305a\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.785257 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-562xs\" (UniqueName: \"kubernetes.io/projected/ca087dcb-6346-46ed-9750-b5548355305a-kube-api-access-562xs\") pod \"ca087dcb-6346-46ed-9750-b5548355305a\" (UID: \"ca087dcb-6346-46ed-9750-b5548355305a\") " Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.802022 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca087dcb-6346-46ed-9750-b5548355305a-kube-api-access-562xs" (OuterVolumeSpecName: "kube-api-access-562xs") pod "ca087dcb-6346-46ed-9750-b5548355305a" (UID: "ca087dcb-6346-46ed-9750-b5548355305a"). InnerVolumeSpecName "kube-api-access-562xs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.813359 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-inventory" (OuterVolumeSpecName: "inventory") pod "ca087dcb-6346-46ed-9750-b5548355305a" (UID: "ca087dcb-6346-46ed-9750-b5548355305a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.813549 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ca087dcb-6346-46ed-9750-b5548355305a" (UID: "ca087dcb-6346-46ed-9750-b5548355305a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.887224 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.887266 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-562xs\" (UniqueName: \"kubernetes.io/projected/ca087dcb-6346-46ed-9750-b5548355305a-kube-api-access-562xs\") on node \"crc\" DevicePath \"\"" Sep 29 14:17:58 crc kubenswrapper[4634]: I0929 14:17:58.887277 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ca087dcb-6346-46ed-9750-b5548355305a-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.189212 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264" event={"ID":"ca087dcb-6346-46ed-9750-b5548355305a","Type":"ContainerDied","Data":"ba084854649e0ffff6ef3400627efa4287b940afbd5838bcb1e26ba640210c1a"} Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.189288 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba084854649e0ffff6ef3400627efa4287b940afbd5838bcb1e26ba640210c1a" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.189400 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-zs264" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.334227 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz"] Sep 29 14:17:59 crc kubenswrapper[4634]: E0929 14:17:59.334549 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca087dcb-6346-46ed-9750-b5548355305a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.334566 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca087dcb-6346-46ed-9750-b5548355305a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.334781 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca087dcb-6346-46ed-9750-b5548355305a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.335399 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.339164 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.339331 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.339679 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.339699 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.365339 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz"] Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.502574 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.502788 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56xr2\" (UniqueName: \"kubernetes.io/projected/332f4970-1479-4efc-8b35-e1795111b1b4-kube-api-access-56xr2\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.502984 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.605152 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.605315 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.605372 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56xr2\" (UniqueName: \"kubernetes.io/projected/332f4970-1479-4efc-8b35-e1795111b1b4-kube-api-access-56xr2\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" 
(UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.610198 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.610717 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.630878 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56xr2\" (UniqueName: \"kubernetes.io/projected/332f4970-1479-4efc-8b35-e1795111b1b4-kube-api-access-56xr2\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-v8shz\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:17:59 crc kubenswrapper[4634]: I0929 14:17:59.664312 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:18:00 crc kubenswrapper[4634]: I0929 14:18:00.292504 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz"] Sep 29 14:18:01 crc kubenswrapper[4634]: I0929 14:18:01.216327 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" event={"ID":"332f4970-1479-4efc-8b35-e1795111b1b4","Type":"ContainerStarted","Data":"161ae57203a9ffd03502ffdad46f26552e37df639fcd49098eed4a5914ab31cb"} Sep 29 14:18:01 crc kubenswrapper[4634]: I0929 14:18:01.216838 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" event={"ID":"332f4970-1479-4efc-8b35-e1795111b1b4","Type":"ContainerStarted","Data":"8c723d071828f53a983586b853ec5df7e4a776a7cb99d54b701b910b96e3fb03"} Sep 29 14:18:01 crc kubenswrapper[4634]: I0929 14:18:01.251418 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" podStartSLOduration=2.074714613 podStartE2EDuration="2.25139206s" podCreationTimestamp="2025-09-29 14:17:59 +0000 UTC" firstStartedPulling="2025-09-29 14:18:00.30019681 +0000 UTC m=+2010.868924559" lastFinishedPulling="2025-09-29 14:18:00.476874247 +0000 UTC m=+2011.045602006" observedRunningTime="2025-09-29 14:18:01.244352206 +0000 UTC m=+2011.813079965" watchObservedRunningTime="2025-09-29 14:18:01.25139206 +0000 UTC m=+2011.820119809" Sep 29 14:18:14 crc kubenswrapper[4634]: I0929 14:18:14.396139 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:18:14 crc kubenswrapper[4634]: I0929 14:18:14.397023 
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.396690 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.399282 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.399359 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4"
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.400486 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4b3f11a3b532b31a5f39dc3954037054fe5968d53a04cc1b756a9b0a70e96890"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.400587 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://4b3f11a3b532b31a5f39dc3954037054fe5968d53a04cc1b756a9b0a70e96890" gracePeriod=600
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.688764 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="4b3f11a3b532b31a5f39dc3954037054fe5968d53a04cc1b756a9b0a70e96890" exitCode=0
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.689272 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"4b3f11a3b532b31a5f39dc3954037054fe5968d53a04cc1b756a9b0a70e96890"}
Sep 29 14:18:44 crc kubenswrapper[4634]: I0929 14:18:44.689328 4634 scope.go:117] "RemoveContainer" containerID="c00149b96eddde14a24404a4001601a4675cd1d1903fe1e75ce4a041238e183a"
Sep 29 14:18:45 crc kubenswrapper[4634]: I0929 14:18:45.709248 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423"}
Sep 29 14:18:57 crc kubenswrapper[4634]: I0929 14:18:57.824037 4634 generic.go:334] "Generic (PLEG): container finished" podID="332f4970-1479-4efc-8b35-e1795111b1b4" containerID="161ae57203a9ffd03502ffdad46f26552e37df639fcd49098eed4a5914ab31cb" exitCode=0
Sep 29 14:18:57 crc kubenswrapper[4634]: I0929 14:18:57.824119 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" event={"ID":"332f4970-1479-4efc-8b35-e1795111b1b4","Type":"ContainerDied","Data":"161ae57203a9ffd03502ffdad46f26552e37df639fcd49098eed4a5914ab31cb"}
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.127721 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fzrjg"]
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.129789 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.155947 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fzrjg"]
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.289706 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slrgv\" (UniqueName: \"kubernetes.io/projected/d70a8849-53eb-49b7-ba63-c3e36180c5f9-kube-api-access-slrgv\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.290036 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-catalog-content\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.290134 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-utilities\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.392283 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-catalog-content\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.392340 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-utilities\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.392443 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slrgv\" (UniqueName: \"kubernetes.io/projected/d70a8849-53eb-49b7-ba63-c3e36180c5f9-kube-api-access-slrgv\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.393154 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-catalog-content\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.393277 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-utilities\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.415128 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slrgv\" (UniqueName: \"kubernetes.io/projected/d70a8849-53eb-49b7-ba63-c3e36180c5f9-kube-api-access-slrgv\") pod \"community-operators-fzrjg\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:58 crc kubenswrapper[4634]: I0929 14:18:58.454985 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fzrjg"
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.074271 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fzrjg"]
Sep 29 14:18:59 crc kubenswrapper[4634]: W0929 14:18:59.091784 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd70a8849_53eb_49b7_ba63_c3e36180c5f9.slice/crio-0f03a398df1b6d325cba74a83af077e385571a0843ef4fc8196476bd7dc8b227 WatchSource:0}: Error finding container 0f03a398df1b6d325cba74a83af077e385571a0843ef4fc8196476bd7dc8b227: Status 404 returned error can't find the container with id 0f03a398df1b6d325cba74a83af077e385571a0843ef4fc8196476bd7dc8b227
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.282841 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz"
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.414318 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-ssh-key\") pod \"332f4970-1479-4efc-8b35-e1795111b1b4\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") "
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.414928 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56xr2\" (UniqueName: \"kubernetes.io/projected/332f4970-1479-4efc-8b35-e1795111b1b4-kube-api-access-56xr2\") pod \"332f4970-1479-4efc-8b35-e1795111b1b4\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") "
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.414961 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-inventory\") pod \"332f4970-1479-4efc-8b35-e1795111b1b4\" (UID: \"332f4970-1479-4efc-8b35-e1795111b1b4\") "
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.421976 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/332f4970-1479-4efc-8b35-e1795111b1b4-kube-api-access-56xr2" (OuterVolumeSpecName: "kube-api-access-56xr2") pod "332f4970-1479-4efc-8b35-e1795111b1b4" (UID: "332f4970-1479-4efc-8b35-e1795111b1b4"). InnerVolumeSpecName "kube-api-access-56xr2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.448640 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-inventory" (OuterVolumeSpecName: "inventory") pod "332f4970-1479-4efc-8b35-e1795111b1b4" (UID: "332f4970-1479-4efc-8b35-e1795111b1b4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.454051 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "332f4970-1479-4efc-8b35-e1795111b1b4" (UID: "332f4970-1479-4efc-8b35-e1795111b1b4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.517459 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.517721 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56xr2\" (UniqueName: \"kubernetes.io/projected/332f4970-1479-4efc-8b35-e1795111b1b4-kube-api-access-56xr2\") on node \"crc\" DevicePath \"\""
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.517824 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/332f4970-1479-4efc-8b35-e1795111b1b4-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.850784 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" event={"ID":"332f4970-1479-4efc-8b35-e1795111b1b4","Type":"ContainerDied","Data":"8c723d071828f53a983586b853ec5df7e4a776a7cb99d54b701b910b96e3fb03"}
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.850833 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c723d071828f53a983586b853ec5df7e4a776a7cb99d54b701b910b96e3fb03"
Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.850911 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz"
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-v8shz" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.876839 4634 generic.go:334] "Generic (PLEG): container finished" podID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerID="80c55c8226b3d17da0fc90c2219356dd78030c97d869d1f552f0ee16fc43132f" exitCode=0 Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.876881 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fzrjg" event={"ID":"d70a8849-53eb-49b7-ba63-c3e36180c5f9","Type":"ContainerDied","Data":"80c55c8226b3d17da0fc90c2219356dd78030c97d869d1f552f0ee16fc43132f"} Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.876905 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fzrjg" event={"ID":"d70a8849-53eb-49b7-ba63-c3e36180c5f9","Type":"ContainerStarted","Data":"0f03a398df1b6d325cba74a83af077e385571a0843ef4fc8196476bd7dc8b227"} Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.879412 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.988417 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-9fkzs"] Sep 29 14:18:59 crc kubenswrapper[4634]: E0929 14:18:59.989043 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="332f4970-1479-4efc-8b35-e1795111b1b4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.989137 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="332f4970-1479-4efc-8b35-e1795111b1b4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.989371 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="332f4970-1479-4efc-8b35-e1795111b1b4" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.990005 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.997555 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.997985 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.998246 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:18:59 crc kubenswrapper[4634]: I0929 14:18:59.998453 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.006772 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-9fkzs"] Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.163269 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpjcn\" (UniqueName: \"kubernetes.io/projected/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-kube-api-access-vpjcn\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.164537 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.164768 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.266831 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.266998 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpjcn\" (UniqueName: \"kubernetes.io/projected/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-kube-api-access-vpjcn\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.267025 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc 
kubenswrapper[4634]: I0929 14:19:00.271744 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.282622 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.294842 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpjcn\" (UniqueName: \"kubernetes.io/projected/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-kube-api-access-vpjcn\") pod \"ssh-known-hosts-edpm-deployment-9fkzs\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.316500 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.888235 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fzrjg" event={"ID":"d70a8849-53eb-49b7-ba63-c3e36180c5f9","Type":"ContainerStarted","Data":"fb257002ecf28e979a44a2668ef1ee8c5eb2de371b386ee061bfefd43c276e1f"} Sep 29 14:19:00 crc kubenswrapper[4634]: I0929 14:19:00.945048 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-9fkzs"] Sep 29 14:19:00 crc kubenswrapper[4634]: W0929 14:19:00.952386 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee18fe6a_41c7_471a_8f99_0cec5b0a2676.slice/crio-27424e5cea34e599446b7d46b21360915b519f61f28df942e9581c53a5752237 WatchSource:0}: Error finding container 27424e5cea34e599446b7d46b21360915b519f61f28df942e9581c53a5752237: Status 404 returned error can't find the container with id 27424e5cea34e599446b7d46b21360915b519f61f28df942e9581c53a5752237 Sep 29 14:19:01 crc kubenswrapper[4634]: I0929 14:19:01.902936 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" event={"ID":"ee18fe6a-41c7-471a-8f99-0cec5b0a2676","Type":"ContainerStarted","Data":"590e38e2c5b34db74f5bf3225a3ec96a75a731e7a77dfd752444057f0d355e12"} Sep 29 14:19:01 crc kubenswrapper[4634]: I0929 14:19:01.902984 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" event={"ID":"ee18fe6a-41c7-471a-8f99-0cec5b0a2676","Type":"ContainerStarted","Data":"27424e5cea34e599446b7d46b21360915b519f61f28df942e9581c53a5752237"} Sep 29 14:19:01 crc kubenswrapper[4634]: I0929 14:19:01.922826 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" podStartSLOduration=2.771562432 podStartE2EDuration="2.922792789s" podCreationTimestamp="2025-09-29 14:18:59 +0000 UTC" firstStartedPulling="2025-09-29 14:19:00.955134102 +0000 UTC m=+2071.523861851" lastFinishedPulling="2025-09-29 14:19:01.106364459 +0000 UTC m=+2071.675092208" 
observedRunningTime="2025-09-29 14:19:01.919890581 +0000 UTC m=+2072.488618330" watchObservedRunningTime="2025-09-29 14:19:01.922792789 +0000 UTC m=+2072.491520538" Sep 29 14:19:02 crc kubenswrapper[4634]: I0929 14:19:02.911720 4634 generic.go:334] "Generic (PLEG): container finished" podID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerID="fb257002ecf28e979a44a2668ef1ee8c5eb2de371b386ee061bfefd43c276e1f" exitCode=0 Sep 29 14:19:02 crc kubenswrapper[4634]: I0929 14:19:02.911802 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fzrjg" event={"ID":"d70a8849-53eb-49b7-ba63-c3e36180c5f9","Type":"ContainerDied","Data":"fb257002ecf28e979a44a2668ef1ee8c5eb2de371b386ee061bfefd43c276e1f"} Sep 29 14:19:03 crc kubenswrapper[4634]: I0929 14:19:03.926007 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fzrjg" event={"ID":"d70a8849-53eb-49b7-ba63-c3e36180c5f9","Type":"ContainerStarted","Data":"59682e3932ef65adcadff13525ffabd9413a8949be29c91aa745095842eb19b4"} Sep 29 14:19:03 crc kubenswrapper[4634]: I0929 14:19:03.947632 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fzrjg" podStartSLOduration=2.454731551 podStartE2EDuration="5.947615361s" podCreationTimestamp="2025-09-29 14:18:58 +0000 UTC" firstStartedPulling="2025-09-29 14:18:59.878995922 +0000 UTC m=+2070.447723661" lastFinishedPulling="2025-09-29 14:19:03.371879722 +0000 UTC m=+2073.940607471" observedRunningTime="2025-09-29 14:19:03.945234301 +0000 UTC m=+2074.513962060" watchObservedRunningTime="2025-09-29 14:19:03.947615361 +0000 UTC m=+2074.516343100" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.677273 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hpl9z"] Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.682361 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.693735 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hpl9z"] Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.799939 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r964p\" (UniqueName: \"kubernetes.io/projected/1800e744-9782-43f4-883a-e435f06163f0-kube-api-access-r964p\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.800113 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-catalog-content\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.800135 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-utilities\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.902066 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r964p\" (UniqueName: \"kubernetes.io/projected/1800e744-9782-43f4-883a-e435f06163f0-kube-api-access-r964p\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.902614 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-catalog-content\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.902710 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-utilities\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.903391 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-catalog-content\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.903419 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-utilities\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:05 crc kubenswrapper[4634]: I0929 14:19:05.924760 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-r964p\" (UniqueName: \"kubernetes.io/projected/1800e744-9782-43f4-883a-e435f06163f0-kube-api-access-r964p\") pod \"redhat-operators-hpl9z\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:06 crc kubenswrapper[4634]: I0929 14:19:06.018624 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:06 crc kubenswrapper[4634]: I0929 14:19:06.530834 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hpl9z"] Sep 29 14:19:06 crc kubenswrapper[4634]: I0929 14:19:06.967639 4634 generic.go:334] "Generic (PLEG): container finished" podID="1800e744-9782-43f4-883a-e435f06163f0" containerID="6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52" exitCode=0 Sep 29 14:19:06 crc kubenswrapper[4634]: I0929 14:19:06.967721 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpl9z" event={"ID":"1800e744-9782-43f4-883a-e435f06163f0","Type":"ContainerDied","Data":"6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52"} Sep 29 14:19:06 crc kubenswrapper[4634]: I0929 14:19:06.968013 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpl9z" event={"ID":"1800e744-9782-43f4-883a-e435f06163f0","Type":"ContainerStarted","Data":"8c0e600002a1e7fff24b24cce547ea163c17400dc99aa5c79d6fc780f7039629"} Sep 29 14:19:07 crc kubenswrapper[4634]: I0929 14:19:07.981722 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpl9z" event={"ID":"1800e744-9782-43f4-883a-e435f06163f0","Type":"ContainerStarted","Data":"62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41"} Sep 29 14:19:08 crc kubenswrapper[4634]: I0929 14:19:08.456476 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fzrjg" Sep 29 14:19:08 crc kubenswrapper[4634]: I0929 14:19:08.456945 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fzrjg" Sep 29 14:19:08 crc kubenswrapper[4634]: I0929 14:19:08.514765 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fzrjg" Sep 29 14:19:09 crc kubenswrapper[4634]: I0929 14:19:09.044185 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fzrjg" Sep 29 14:19:10 crc kubenswrapper[4634]: I0929 14:19:10.004528 4634 generic.go:334] "Generic (PLEG): container finished" podID="ee18fe6a-41c7-471a-8f99-0cec5b0a2676" containerID="590e38e2c5b34db74f5bf3225a3ec96a75a731e7a77dfd752444057f0d355e12" exitCode=0 Sep 29 14:19:10 crc kubenswrapper[4634]: I0929 14:19:10.004742 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" event={"ID":"ee18fe6a-41c7-471a-8f99-0cec5b0a2676","Type":"ContainerDied","Data":"590e38e2c5b34db74f5bf3225a3ec96a75a731e7a77dfd752444057f0d355e12"} Sep 29 14:19:10 crc kubenswrapper[4634]: I0929 14:19:10.686241 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fzrjg"] Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.015959 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fzrjg" 
podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="registry-server" containerID="cri-o://59682e3932ef65adcadff13525ffabd9413a8949be29c91aa745095842eb19b4" gracePeriod=2 Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.553201 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.733329 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpjcn\" (UniqueName: \"kubernetes.io/projected/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-kube-api-access-vpjcn\") pod \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.733448 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-ssh-key-openstack-edpm-ipam\") pod \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.733481 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-inventory-0\") pod \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\" (UID: \"ee18fe6a-41c7-471a-8f99-0cec5b0a2676\") " Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.810062 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-kube-api-access-vpjcn" (OuterVolumeSpecName: "kube-api-access-vpjcn") pod "ee18fe6a-41c7-471a-8f99-0cec5b0a2676" (UID: "ee18fe6a-41c7-471a-8f99-0cec5b0a2676"). InnerVolumeSpecName "kube-api-access-vpjcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.823458 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "ee18fe6a-41c7-471a-8f99-0cec5b0a2676" (UID: "ee18fe6a-41c7-471a-8f99-0cec5b0a2676"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.828068 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ee18fe6a-41c7-471a-8f99-0cec5b0a2676" (UID: "ee18fe6a-41c7-471a-8f99-0cec5b0a2676"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.836453 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpjcn\" (UniqueName: \"kubernetes.io/projected/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-kube-api-access-vpjcn\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.836513 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:11 crc kubenswrapper[4634]: I0929 14:19:11.836530 4634 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ee18fe6a-41c7-471a-8f99-0cec5b0a2676-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.027792 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fzrjg" event={"ID":"d70a8849-53eb-49b7-ba63-c3e36180c5f9","Type":"ContainerDied","Data":"59682e3932ef65adcadff13525ffabd9413a8949be29c91aa745095842eb19b4"} Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.028075 4634 generic.go:334] "Generic (PLEG): container finished" podID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerID="59682e3932ef65adcadff13525ffabd9413a8949be29c91aa745095842eb19b4" exitCode=0 Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.031452 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" event={"ID":"ee18fe6a-41c7-471a-8f99-0cec5b0a2676","Type":"ContainerDied","Data":"27424e5cea34e599446b7d46b21360915b519f61f28df942e9581c53a5752237"} Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.031487 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27424e5cea34e599446b7d46b21360915b519f61f28df942e9581c53a5752237" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.031540 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-9fkzs" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.046709 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fzrjg" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.148233 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-catalog-content\") pod \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.148588 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-utilities\") pod \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.148759 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slrgv\" (UniqueName: \"kubernetes.io/projected/d70a8849-53eb-49b7-ba63-c3e36180c5f9-kube-api-access-slrgv\") pod \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\" (UID: \"d70a8849-53eb-49b7-ba63-c3e36180c5f9\") " Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.156800 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-utilities" (OuterVolumeSpecName: "utilities") pod "d70a8849-53eb-49b7-ba63-c3e36180c5f9" (UID: "d70a8849-53eb-49b7-ba63-c3e36180c5f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.175798 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d70a8849-53eb-49b7-ba63-c3e36180c5f9-kube-api-access-slrgv" (OuterVolumeSpecName: "kube-api-access-slrgv") pod "d70a8849-53eb-49b7-ba63-c3e36180c5f9" (UID: "d70a8849-53eb-49b7-ba63-c3e36180c5f9"). InnerVolumeSpecName "kube-api-access-slrgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.227883 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d70a8849-53eb-49b7-ba63-c3e36180c5f9" (UID: "d70a8849-53eb-49b7-ba63-c3e36180c5f9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.251853 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slrgv\" (UniqueName: \"kubernetes.io/projected/d70a8849-53eb-49b7-ba63-c3e36180c5f9-kube-api-access-slrgv\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.251922 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.251944 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d70a8849-53eb-49b7-ba63-c3e36180c5f9-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.291652 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4"] Sep 29 14:19:12 crc kubenswrapper[4634]: E0929 14:19:12.292132 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="extract-content" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.292149 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="extract-content" Sep 29 14:19:12 crc kubenswrapper[4634]: E0929 14:19:12.292182 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="registry-server" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.292189 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="registry-server" Sep 29 14:19:12 crc kubenswrapper[4634]: E0929 14:19:12.292198 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee18fe6a-41c7-471a-8f99-0cec5b0a2676" containerName="ssh-known-hosts-edpm-deployment" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.292204 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee18fe6a-41c7-471a-8f99-0cec5b0a2676" containerName="ssh-known-hosts-edpm-deployment" Sep 29 14:19:12 crc kubenswrapper[4634]: E0929 14:19:12.292239 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="extract-utilities" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.292246 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="extract-utilities" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.292445 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" containerName="registry-server" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.292468 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee18fe6a-41c7-471a-8f99-0cec5b0a2676" containerName="ssh-known-hosts-edpm-deployment" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.293020 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4"] Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.293133 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.295442 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.297537 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.297719 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.298021 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.456309 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.456407 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.456492 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qg5pm\" (UniqueName: \"kubernetes.io/projected/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-kube-api-access-qg5pm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.558519 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.559041 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.559137 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qg5pm\" (UniqueName: \"kubernetes.io/projected/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-kube-api-access-qg5pm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.565376 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.567295 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.593008 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qg5pm\" (UniqueName: \"kubernetes.io/projected/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-kube-api-access-qg5pm\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-b7fm4\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:12 crc kubenswrapper[4634]: I0929 14:19:12.627293 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.047374 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fzrjg" Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.047365 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fzrjg" event={"ID":"d70a8849-53eb-49b7-ba63-c3e36180c5f9","Type":"ContainerDied","Data":"0f03a398df1b6d325cba74a83af077e385571a0843ef4fc8196476bd7dc8b227"} Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.047952 4634 scope.go:117] "RemoveContainer" containerID="59682e3932ef65adcadff13525ffabd9413a8949be29c91aa745095842eb19b4" Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.051406 4634 generic.go:334] "Generic (PLEG): container finished" podID="1800e744-9782-43f4-883a-e435f06163f0" containerID="62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41" exitCode=0 Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.051445 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpl9z" event={"ID":"1800e744-9782-43f4-883a-e435f06163f0","Type":"ContainerDied","Data":"62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41"} Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.090934 4634 scope.go:117] "RemoveContainer" containerID="fb257002ecf28e979a44a2668ef1ee8c5eb2de371b386ee061bfefd43c276e1f" Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.116108 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fzrjg"] Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.132627 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fzrjg"] Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.148897 4634 scope.go:117] "RemoveContainer" containerID="80c55c8226b3d17da0fc90c2219356dd78030c97d869d1f552f0ee16fc43132f" Sep 29 14:19:13 crc kubenswrapper[4634]: I0929 14:19:13.259059 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4"] Sep 29 14:19:14 crc kubenswrapper[4634]: I0929 14:19:14.065490 4634 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" event={"ID":"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01","Type":"ContainerStarted","Data":"4a8e4e36e312ec2f69207eb326bcedcb11e5eba67f04b6745b303b976a945a76"} Sep 29 14:19:14 crc kubenswrapper[4634]: I0929 14:19:14.065903 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" event={"ID":"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01","Type":"ContainerStarted","Data":"690f2640a257cf55c80a40d5b914130882a18bf4a7651b57cb89a94c43657bac"} Sep 29 14:19:14 crc kubenswrapper[4634]: I0929 14:19:14.067824 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpl9z" event={"ID":"1800e744-9782-43f4-883a-e435f06163f0","Type":"ContainerStarted","Data":"1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9"} Sep 29 14:19:14 crc kubenswrapper[4634]: I0929 14:19:14.102705 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" podStartSLOduration=1.9128781639999999 podStartE2EDuration="2.10267681s" podCreationTimestamp="2025-09-29 14:19:12 +0000 UTC" firstStartedPulling="2025-09-29 14:19:13.27612798 +0000 UTC m=+2083.844855729" lastFinishedPulling="2025-09-29 14:19:13.465926626 +0000 UTC m=+2084.034654375" observedRunningTime="2025-09-29 14:19:14.083208387 +0000 UTC m=+2084.651936136" watchObservedRunningTime="2025-09-29 14:19:14.10267681 +0000 UTC m=+2084.671404559" Sep 29 14:19:14 crc kubenswrapper[4634]: I0929 14:19:14.123677 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d70a8849-53eb-49b7-ba63-c3e36180c5f9" path="/var/lib/kubelet/pods/d70a8849-53eb-49b7-ba63-c3e36180c5f9/volumes" Sep 29 14:19:16 crc kubenswrapper[4634]: I0929 14:19:16.020270 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:16 crc kubenswrapper[4634]: I0929 14:19:16.021039 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:17 crc kubenswrapper[4634]: I0929 14:19:17.071481 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hpl9z" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="registry-server" probeResult="failure" output=< Sep 29 14:19:17 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:19:17 crc kubenswrapper[4634]: > Sep 29 14:19:23 crc kubenswrapper[4634]: I0929 14:19:23.165072 4634 generic.go:334] "Generic (PLEG): container finished" podID="c35a01f0-4d25-41ad-8eff-9d65bbb2fa01" containerID="4a8e4e36e312ec2f69207eb326bcedcb11e5eba67f04b6745b303b976a945a76" exitCode=0 Sep 29 14:19:23 crc kubenswrapper[4634]: I0929 14:19:23.165669 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" event={"ID":"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01","Type":"ContainerDied","Data":"4a8e4e36e312ec2f69207eb326bcedcb11e5eba67f04b6745b303b976a945a76"} Sep 29 14:19:23 crc kubenswrapper[4634]: I0929 14:19:23.185351 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hpl9z" podStartSLOduration=11.666831703 podStartE2EDuration="18.185324302s" podCreationTimestamp="2025-09-29 14:19:05 +0000 UTC" firstStartedPulling="2025-09-29 
14:19:06.970641524 +0000 UTC m=+2077.539369273" lastFinishedPulling="2025-09-29 14:19:13.489134123 +0000 UTC m=+2084.057861872" observedRunningTime="2025-09-29 14:19:14.102927247 +0000 UTC m=+2084.671654996" watchObservedRunningTime="2025-09-29 14:19:23.185324302 +0000 UTC m=+2093.754052091" Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.575440 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.718165 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-inventory\") pod \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.718421 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-ssh-key\") pod \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.718484 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5pm\" (UniqueName: \"kubernetes.io/projected/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-kube-api-access-qg5pm\") pod \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\" (UID: \"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01\") " Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.726765 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-kube-api-access-qg5pm" (OuterVolumeSpecName: "kube-api-access-qg5pm") pod "c35a01f0-4d25-41ad-8eff-9d65bbb2fa01" (UID: "c35a01f0-4d25-41ad-8eff-9d65bbb2fa01"). InnerVolumeSpecName "kube-api-access-qg5pm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.749767 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-inventory" (OuterVolumeSpecName: "inventory") pod "c35a01f0-4d25-41ad-8eff-9d65bbb2fa01" (UID: "c35a01f0-4d25-41ad-8eff-9d65bbb2fa01"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.763942 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c35a01f0-4d25-41ad-8eff-9d65bbb2fa01" (UID: "c35a01f0-4d25-41ad-8eff-9d65bbb2fa01"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.821197 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.821248 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:24 crc kubenswrapper[4634]: I0929 14:19:24.821261 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5pm\" (UniqueName: \"kubernetes.io/projected/c35a01f0-4d25-41ad-8eff-9d65bbb2fa01-kube-api-access-qg5pm\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.185686 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" event={"ID":"c35a01f0-4d25-41ad-8eff-9d65bbb2fa01","Type":"ContainerDied","Data":"690f2640a257cf55c80a40d5b914130882a18bf4a7651b57cb89a94c43657bac"} Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.185721 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="690f2640a257cf55c80a40d5b914130882a18bf4a7651b57cb89a94c43657bac" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.185743 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-b7fm4" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.270619 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk"] Sep 29 14:19:25 crc kubenswrapper[4634]: E0929 14:19:25.271046 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c35a01f0-4d25-41ad-8eff-9d65bbb2fa01" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.271071 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="c35a01f0-4d25-41ad-8eff-9d65bbb2fa01" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.271354 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="c35a01f0-4d25-41ad-8eff-9d65bbb2fa01" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.272146 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.274904 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.276046 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.277157 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.283339 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk"] Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.284737 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.431807 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.431918 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnb4s\" (UniqueName: \"kubernetes.io/projected/652c8902-8b97-4a81-8c05-10b0702d1c68-kube-api-access-fnb4s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.431995 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.534273 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.534448 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.534785 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnb4s\" (UniqueName: \"kubernetes.io/projected/652c8902-8b97-4a81-8c05-10b0702d1c68-kube-api-access-fnb4s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: 
\"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.544880 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.544936 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.553373 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnb4s\" (UniqueName: \"kubernetes.io/projected/652c8902-8b97-4a81-8c05-10b0702d1c68-kube-api-access-fnb4s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:25 crc kubenswrapper[4634]: I0929 14:19:25.591334 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:26 crc kubenswrapper[4634]: I0929 14:19:26.074567 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:26 crc kubenswrapper[4634]: I0929 14:19:26.131798 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:26 crc kubenswrapper[4634]: I0929 14:19:26.221460 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk"] Sep 29 14:19:26 crc kubenswrapper[4634]: I0929 14:19:26.321874 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hpl9z"] Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.202254 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hpl9z" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="registry-server" containerID="cri-o://1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9" gracePeriod=2 Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.203200 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" event={"ID":"652c8902-8b97-4a81-8c05-10b0702d1c68","Type":"ContainerStarted","Data":"ea0984dc57c1b1453df7313fa422d808a6139f9895ecb3df0bbeee0da974a912"} Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.203226 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" event={"ID":"652c8902-8b97-4a81-8c05-10b0702d1c68","Type":"ContainerStarted","Data":"44e0b81067075eb5687f8ef614516be4d965809092825d6e2e0ccd3bd8103d76"} Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.657749 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.680666 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" podStartSLOduration=2.502452437 podStartE2EDuration="2.680642099s" podCreationTimestamp="2025-09-29 14:19:25 +0000 UTC" firstStartedPulling="2025-09-29 14:19:26.226237409 +0000 UTC m=+2096.794965158" lastFinishedPulling="2025-09-29 14:19:26.404427071 +0000 UTC m=+2096.973154820" observedRunningTime="2025-09-29 14:19:27.221490641 +0000 UTC m=+2097.790218390" watchObservedRunningTime="2025-09-29 14:19:27.680642099 +0000 UTC m=+2098.249369858" Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.775729 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-utilities\") pod \"1800e744-9782-43f4-883a-e435f06163f0\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.775833 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-catalog-content\") pod \"1800e744-9782-43f4-883a-e435f06163f0\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.776619 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r964p\" (UniqueName: \"kubernetes.io/projected/1800e744-9782-43f4-883a-e435f06163f0-kube-api-access-r964p\") pod \"1800e744-9782-43f4-883a-e435f06163f0\" (UID: \"1800e744-9782-43f4-883a-e435f06163f0\") " Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.776653 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-utilities" (OuterVolumeSpecName: "utilities") pod "1800e744-9782-43f4-883a-e435f06163f0" (UID: "1800e744-9782-43f4-883a-e435f06163f0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.777520 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.783474 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1800e744-9782-43f4-883a-e435f06163f0-kube-api-access-r964p" (OuterVolumeSpecName: "kube-api-access-r964p") pod "1800e744-9782-43f4-883a-e435f06163f0" (UID: "1800e744-9782-43f4-883a-e435f06163f0"). InnerVolumeSpecName "kube-api-access-r964p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.866410 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1800e744-9782-43f4-883a-e435f06163f0" (UID: "1800e744-9782-43f4-883a-e435f06163f0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.879231 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1800e744-9782-43f4-883a-e435f06163f0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:27 crc kubenswrapper[4634]: I0929 14:19:27.879265 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r964p\" (UniqueName: \"kubernetes.io/projected/1800e744-9782-43f4-883a-e435f06163f0-kube-api-access-r964p\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.214719 4634 generic.go:334] "Generic (PLEG): container finished" podID="1800e744-9782-43f4-883a-e435f06163f0" containerID="1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9" exitCode=0 Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.214771 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hpl9z" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.214810 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpl9z" event={"ID":"1800e744-9782-43f4-883a-e435f06163f0","Type":"ContainerDied","Data":"1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9"} Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.214847 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hpl9z" event={"ID":"1800e744-9782-43f4-883a-e435f06163f0","Type":"ContainerDied","Data":"8c0e600002a1e7fff24b24cce547ea163c17400dc99aa5c79d6fc780f7039629"} Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.214871 4634 scope.go:117] "RemoveContainer" containerID="1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.239881 4634 scope.go:117] "RemoveContainer" containerID="62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.246386 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hpl9z"] Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.256168 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hpl9z"] Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.289857 4634 scope.go:117] "RemoveContainer" containerID="6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.324580 4634 scope.go:117] "RemoveContainer" containerID="1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9" Sep 29 14:19:28 crc kubenswrapper[4634]: E0929 14:19:28.325227 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9\": container with ID starting with 1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9 not found: ID does not exist" containerID="1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.325291 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9"} err="failed to get container status \"1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9\": 
rpc error: code = NotFound desc = could not find container \"1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9\": container with ID starting with 1ef7cbf40d4bc92ed8230f5cc9cacca29075c0c71597e3cbc2e9918ac78abac9 not found: ID does not exist" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.325318 4634 scope.go:117] "RemoveContainer" containerID="62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41" Sep 29 14:19:28 crc kubenswrapper[4634]: E0929 14:19:28.325700 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41\": container with ID starting with 62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41 not found: ID does not exist" containerID="62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.325745 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41"} err="failed to get container status \"62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41\": rpc error: code = NotFound desc = could not find container \"62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41\": container with ID starting with 62656478983b75c39503908e439868bb7e031b583be3f0b57f470a2a9e505b41 not found: ID does not exist" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.325777 4634 scope.go:117] "RemoveContainer" containerID="6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52" Sep 29 14:19:28 crc kubenswrapper[4634]: E0929 14:19:28.326150 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52\": container with ID starting with 6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52 not found: ID does not exist" containerID="6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52" Sep 29 14:19:28 crc kubenswrapper[4634]: I0929 14:19:28.326208 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52"} err="failed to get container status \"6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52\": rpc error: code = NotFound desc = could not find container \"6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52\": container with ID starting with 6a228dce7d17b3fc224e4c7faf5f6b6f5cca4acd6e718809644f2c8f6fda3c52 not found: ID does not exist" Sep 29 14:19:30 crc kubenswrapper[4634]: I0929 14:19:30.131686 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1800e744-9782-43f4-883a-e435f06163f0" path="/var/lib/kubelet/pods/1800e744-9782-43f4-883a-e435f06163f0/volumes" Sep 29 14:19:33 crc kubenswrapper[4634]: I0929 14:19:33.960735 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tb8v6"] Sep 29 14:19:33 crc kubenswrapper[4634]: E0929 14:19:33.961759 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="registry-server" Sep 29 14:19:33 crc kubenswrapper[4634]: I0929 14:19:33.961778 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="registry-server" Sep 29 
14:19:33 crc kubenswrapper[4634]: E0929 14:19:33.961793 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="extract-utilities" Sep 29 14:19:33 crc kubenswrapper[4634]: I0929 14:19:33.961802 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="extract-utilities" Sep 29 14:19:33 crc kubenswrapper[4634]: E0929 14:19:33.961833 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="extract-content" Sep 29 14:19:33 crc kubenswrapper[4634]: I0929 14:19:33.961842 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="extract-content" Sep 29 14:19:33 crc kubenswrapper[4634]: I0929 14:19:33.962086 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="1800e744-9782-43f4-883a-e435f06163f0" containerName="registry-server" Sep 29 14:19:33 crc kubenswrapper[4634]: I0929 14:19:33.963832 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:33 crc kubenswrapper[4634]: I0929 14:19:33.983401 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tb8v6"] Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.114239 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-catalog-content\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.114307 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m67ts\" (UniqueName: \"kubernetes.io/projected/7d150b67-86a8-48aa-9c30-3e6687d692e2-kube-api-access-m67ts\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.114362 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-utilities\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.216029 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-catalog-content\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.216114 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m67ts\" (UniqueName: \"kubernetes.io/projected/7d150b67-86a8-48aa-9c30-3e6687d692e2-kube-api-access-m67ts\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.216176 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-utilities\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.216762 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-catalog-content\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.216798 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-utilities\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.242064 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m67ts\" (UniqueName: \"kubernetes.io/projected/7d150b67-86a8-48aa-9c30-3e6687d692e2-kube-api-access-m67ts\") pod \"certified-operators-tb8v6\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.287726 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:34 crc kubenswrapper[4634]: I0929 14:19:34.728143 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tb8v6"] Sep 29 14:19:35 crc kubenswrapper[4634]: I0929 14:19:35.302220 4634 generic.go:334] "Generic (PLEG): container finished" podID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerID="c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d" exitCode=0 Sep 29 14:19:35 crc kubenswrapper[4634]: I0929 14:19:35.302763 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tb8v6" event={"ID":"7d150b67-86a8-48aa-9c30-3e6687d692e2","Type":"ContainerDied","Data":"c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d"} Sep 29 14:19:35 crc kubenswrapper[4634]: I0929 14:19:35.302819 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tb8v6" event={"ID":"7d150b67-86a8-48aa-9c30-3e6687d692e2","Type":"ContainerStarted","Data":"c7b64f6babd4c06a10e3315ebcc5af31ec9441b6f29c154da81dfdd95a868ba3"} Sep 29 14:19:37 crc kubenswrapper[4634]: I0929 14:19:37.329460 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tb8v6" event={"ID":"7d150b67-86a8-48aa-9c30-3e6687d692e2","Type":"ContainerStarted","Data":"84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438"} Sep 29 14:19:37 crc kubenswrapper[4634]: I0929 14:19:37.332961 4634 generic.go:334] "Generic (PLEG): container finished" podID="652c8902-8b97-4a81-8c05-10b0702d1c68" containerID="ea0984dc57c1b1453df7313fa422d808a6139f9895ecb3df0bbeee0da974a912" exitCode=0 Sep 29 14:19:37 crc kubenswrapper[4634]: I0929 14:19:37.333024 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" 
event={"ID":"652c8902-8b97-4a81-8c05-10b0702d1c68","Type":"ContainerDied","Data":"ea0984dc57c1b1453df7313fa422d808a6139f9895ecb3df0bbeee0da974a912"} Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.348389 4634 generic.go:334] "Generic (PLEG): container finished" podID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerID="84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438" exitCode=0 Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.349325 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tb8v6" event={"ID":"7d150b67-86a8-48aa-9c30-3e6687d692e2","Type":"ContainerDied","Data":"84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438"} Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.767472 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.930282 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnb4s\" (UniqueName: \"kubernetes.io/projected/652c8902-8b97-4a81-8c05-10b0702d1c68-kube-api-access-fnb4s\") pod \"652c8902-8b97-4a81-8c05-10b0702d1c68\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.930406 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-inventory\") pod \"652c8902-8b97-4a81-8c05-10b0702d1c68\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.930602 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-ssh-key\") pod \"652c8902-8b97-4a81-8c05-10b0702d1c68\" (UID: \"652c8902-8b97-4a81-8c05-10b0702d1c68\") " Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.947513 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/652c8902-8b97-4a81-8c05-10b0702d1c68-kube-api-access-fnb4s" (OuterVolumeSpecName: "kube-api-access-fnb4s") pod "652c8902-8b97-4a81-8c05-10b0702d1c68" (UID: "652c8902-8b97-4a81-8c05-10b0702d1c68"). InnerVolumeSpecName "kube-api-access-fnb4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.982261 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-inventory" (OuterVolumeSpecName: "inventory") pod "652c8902-8b97-4a81-8c05-10b0702d1c68" (UID: "652c8902-8b97-4a81-8c05-10b0702d1c68"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:19:38 crc kubenswrapper[4634]: I0929 14:19:38.983365 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "652c8902-8b97-4a81-8c05-10b0702d1c68" (UID: "652c8902-8b97-4a81-8c05-10b0702d1c68"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.033549 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnb4s\" (UniqueName: \"kubernetes.io/projected/652c8902-8b97-4a81-8c05-10b0702d1c68-kube-api-access-fnb4s\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.033590 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.033601 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/652c8902-8b97-4a81-8c05-10b0702d1c68-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.362801 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tb8v6" event={"ID":"7d150b67-86a8-48aa-9c30-3e6687d692e2","Type":"ContainerStarted","Data":"25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0"} Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.364587 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" event={"ID":"652c8902-8b97-4a81-8c05-10b0702d1c68","Type":"ContainerDied","Data":"44e0b81067075eb5687f8ef614516be4d965809092825d6e2e0ccd3bd8103d76"} Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.364615 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44e0b81067075eb5687f8ef614516be4d965809092825d6e2e0ccd3bd8103d76" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.364702 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.403669 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tb8v6" podStartSLOduration=2.955695792 podStartE2EDuration="6.403635656s" podCreationTimestamp="2025-09-29 14:19:33 +0000 UTC" firstStartedPulling="2025-09-29 14:19:35.309637932 +0000 UTC m=+2105.878365681" lastFinishedPulling="2025-09-29 14:19:38.757577786 +0000 UTC m=+2109.326305545" observedRunningTime="2025-09-29 14:19:39.392174286 +0000 UTC m=+2109.960902045" watchObservedRunningTime="2025-09-29 14:19:39.403635656 +0000 UTC m=+2109.972363405" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.538307 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb"] Sep 29 14:19:39 crc kubenswrapper[4634]: E0929 14:19:39.538997 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="652c8902-8b97-4a81-8c05-10b0702d1c68" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.539036 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="652c8902-8b97-4a81-8c05-10b0702d1c68" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.539404 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="652c8902-8b97-4a81-8c05-10b0702d1c68" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.541003 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.544328 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.544500 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.544616 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.545090 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.545650 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.545852 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.548386 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.548530 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.564264 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb"] Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.649909 4634 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650308 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650478 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650506 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650633 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650679 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650794 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650859 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.650988 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.651058 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.651282 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.651355 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.651378 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62jwl\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-kube-api-access-62jwl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.651427 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.754240 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-neutron-metadata-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755165 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755201 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62jwl\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-kube-api-access-62jwl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755240 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755282 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755333 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755419 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755447 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc 
kubenswrapper[4634]: I0929 14:19:39.755501 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755537 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755590 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755632 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755679 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.755712 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.762309 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.764448 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ovn-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.764495 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.765397 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.767489 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.769163 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.771989 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.774474 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.774482 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.774596 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.776695 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62jwl\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-kube-api-access-62jwl\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.783239 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.783889 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.787463 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:39 crc kubenswrapper[4634]: I0929 14:19:39.867762 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:19:40 crc kubenswrapper[4634]: I0929 14:19:40.482627 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb"] Sep 29 14:19:41 crc kubenswrapper[4634]: I0929 14:19:41.399776 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" event={"ID":"c2fcbf70-369d-41cd-8187-7e26848b9171","Type":"ContainerStarted","Data":"00680b619cc55944a10a6530db64e4072c97443011babe70a9e6278dca8ccc9a"} Sep 29 14:19:41 crc kubenswrapper[4634]: I0929 14:19:41.400258 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" event={"ID":"c2fcbf70-369d-41cd-8187-7e26848b9171","Type":"ContainerStarted","Data":"ae88494d3e3f030df6735b45a0973cf920653c45ed9d60927837fed10123e920"} Sep 29 14:19:41 crc kubenswrapper[4634]: I0929 14:19:41.443415 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" podStartSLOduration=2.274232147 podStartE2EDuration="2.44338436s" podCreationTimestamp="2025-09-29 14:19:39 +0000 UTC" firstStartedPulling="2025-09-29 14:19:40.488203361 +0000 UTC m=+2111.056931110" lastFinishedPulling="2025-09-29 14:19:40.657355574 +0000 UTC m=+2111.226083323" observedRunningTime="2025-09-29 14:19:41.426073571 +0000 UTC m=+2111.994801380" watchObservedRunningTime="2025-09-29 14:19:41.44338436 +0000 UTC m=+2112.012112149" Sep 29 14:19:44 crc kubenswrapper[4634]: I0929 14:19:44.288755 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:44 crc kubenswrapper[4634]: I0929 14:19:44.290058 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:44 crc kubenswrapper[4634]: I0929 14:19:44.339386 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:44 crc kubenswrapper[4634]: I0929 14:19:44.502185 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:44 crc kubenswrapper[4634]: I0929 14:19:44.574830 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tb8v6"] Sep 29 14:19:46 crc kubenswrapper[4634]: I0929 14:19:46.447713 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tb8v6" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="registry-server" containerID="cri-o://25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0" gracePeriod=2 Sep 29 14:19:46 crc kubenswrapper[4634]: I0929 14:19:46.938947 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.053423 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-catalog-content\") pod \"7d150b67-86a8-48aa-9c30-3e6687d692e2\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.053647 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-utilities\") pod \"7d150b67-86a8-48aa-9c30-3e6687d692e2\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.053759 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m67ts\" (UniqueName: \"kubernetes.io/projected/7d150b67-86a8-48aa-9c30-3e6687d692e2-kube-api-access-m67ts\") pod \"7d150b67-86a8-48aa-9c30-3e6687d692e2\" (UID: \"7d150b67-86a8-48aa-9c30-3e6687d692e2\") " Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.056972 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-utilities" (OuterVolumeSpecName: "utilities") pod "7d150b67-86a8-48aa-9c30-3e6687d692e2" (UID: "7d150b67-86a8-48aa-9c30-3e6687d692e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.062312 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d150b67-86a8-48aa-9c30-3e6687d692e2-kube-api-access-m67ts" (OuterVolumeSpecName: "kube-api-access-m67ts") pod "7d150b67-86a8-48aa-9c30-3e6687d692e2" (UID: "7d150b67-86a8-48aa-9c30-3e6687d692e2"). InnerVolumeSpecName "kube-api-access-m67ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.157467 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.157504 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m67ts\" (UniqueName: \"kubernetes.io/projected/7d150b67-86a8-48aa-9c30-3e6687d692e2-kube-api-access-m67ts\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.212520 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7d150b67-86a8-48aa-9c30-3e6687d692e2" (UID: "7d150b67-86a8-48aa-9c30-3e6687d692e2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.260050 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d150b67-86a8-48aa-9c30-3e6687d692e2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.461477 4634 generic.go:334] "Generic (PLEG): container finished" podID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerID="25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0" exitCode=0 Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.461576 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tb8v6" event={"ID":"7d150b67-86a8-48aa-9c30-3e6687d692e2","Type":"ContainerDied","Data":"25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0"} Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.461912 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tb8v6" event={"ID":"7d150b67-86a8-48aa-9c30-3e6687d692e2","Type":"ContainerDied","Data":"c7b64f6babd4c06a10e3315ebcc5af31ec9441b6f29c154da81dfdd95a868ba3"} Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.461942 4634 scope.go:117] "RemoveContainer" containerID="25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.461610 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tb8v6" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.493391 4634 scope.go:117] "RemoveContainer" containerID="84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.501851 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tb8v6"] Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.515780 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tb8v6"] Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.534552 4634 scope.go:117] "RemoveContainer" containerID="c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.574173 4634 scope.go:117] "RemoveContainer" containerID="25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0" Sep 29 14:19:47 crc kubenswrapper[4634]: E0929 14:19:47.576023 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0\": container with ID starting with 25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0 not found: ID does not exist" containerID="25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.576060 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0"} err="failed to get container status \"25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0\": rpc error: code = NotFound desc = could not find container \"25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0\": container with ID starting with 25b03414cb30470d3b54d58fcbb402d7c32bf0b224cc25a2b858ce42207a39c0 not found: ID does not exist" Sep 29 
14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.576163 4634 scope.go:117] "RemoveContainer" containerID="84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438" Sep 29 14:19:47 crc kubenswrapper[4634]: E0929 14:19:47.576495 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438\": container with ID starting with 84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438 not found: ID does not exist" containerID="84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.576525 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438"} err="failed to get container status \"84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438\": rpc error: code = NotFound desc = could not find container \"84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438\": container with ID starting with 84dbf9d9246549b0761ad32230f566d9d46309cd0f652cd3be5a57cebeb51438 not found: ID does not exist" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.576543 4634 scope.go:117] "RemoveContainer" containerID="c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d" Sep 29 14:19:47 crc kubenswrapper[4634]: E0929 14:19:47.577002 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d\": container with ID starting with c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d not found: ID does not exist" containerID="c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d" Sep 29 14:19:47 crc kubenswrapper[4634]: I0929 14:19:47.577057 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d"} err="failed to get container status \"c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d\": rpc error: code = NotFound desc = could not find container \"c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d\": container with ID starting with c227eb0a783994e3d068d27186e49b9360595d5a71a96405be5b0da8342b1a3d not found: ID does not exist" Sep 29 14:19:48 crc kubenswrapper[4634]: I0929 14:19:48.124234 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" path="/var/lib/kubelet/pods/7d150b67-86a8-48aa-9c30-3e6687d692e2/volumes" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.410563 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkr6"] Sep 29 14:19:50 crc kubenswrapper[4634]: E0929 14:19:50.411183 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="extract-utilities" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.411195 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="extract-utilities" Sep 29 14:19:50 crc kubenswrapper[4634]: E0929 14:19:50.411202 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="registry-server" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.411208 
4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="registry-server" Sep 29 14:19:50 crc kubenswrapper[4634]: E0929 14:19:50.411232 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="extract-content" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.411238 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="extract-content" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.411415 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d150b67-86a8-48aa-9c30-3e6687d692e2" containerName="registry-server" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.412672 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.465234 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkr6"] Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.533448 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6ln8\" (UniqueName: \"kubernetes.io/projected/a29a1431-e083-4929-9845-12e4b10baea8-kube-api-access-c6ln8\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.533539 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-catalog-content\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.533668 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-utilities\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.636132 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-utilities\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.636317 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6ln8\" (UniqueName: \"kubernetes.io/projected/a29a1431-e083-4929-9845-12e4b10baea8-kube-api-access-c6ln8\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.636368 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-catalog-content\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc 
kubenswrapper[4634]: I0929 14:19:50.637001 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-utilities\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.637051 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-catalog-content\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.658472 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6ln8\" (UniqueName: \"kubernetes.io/projected/a29a1431-e083-4929-9845-12e4b10baea8-kube-api-access-c6ln8\") pod \"redhat-marketplace-fqkr6\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:50 crc kubenswrapper[4634]: I0929 14:19:50.753762 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:19:51 crc kubenswrapper[4634]: I0929 14:19:51.117268 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkr6"] Sep 29 14:19:51 crc kubenswrapper[4634]: I0929 14:19:51.521679 4634 generic.go:334] "Generic (PLEG): container finished" podID="a29a1431-e083-4929-9845-12e4b10baea8" containerID="10e17010f116d7a8ab12bdece301811421da1dd7f70faafd76f11941f667b74b" exitCode=0 Sep 29 14:19:51 crc kubenswrapper[4634]: I0929 14:19:51.521738 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkr6" event={"ID":"a29a1431-e083-4929-9845-12e4b10baea8","Type":"ContainerDied","Data":"10e17010f116d7a8ab12bdece301811421da1dd7f70faafd76f11941f667b74b"} Sep 29 14:19:51 crc kubenswrapper[4634]: I0929 14:19:51.521775 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkr6" event={"ID":"a29a1431-e083-4929-9845-12e4b10baea8","Type":"ContainerStarted","Data":"f6c483d56ba84b1c9a5df1108e6990ddf2b0e7cf651a4ff1a1fccc4b38f38340"} Sep 29 14:19:53 crc kubenswrapper[4634]: I0929 14:19:53.547314 4634 generic.go:334] "Generic (PLEG): container finished" podID="a29a1431-e083-4929-9845-12e4b10baea8" containerID="83b2a9f63114bfb5d8149fb4dde9b375761226abd1a77cf38df33e2742d9bffb" exitCode=0 Sep 29 14:19:53 crc kubenswrapper[4634]: I0929 14:19:53.547419 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkr6" event={"ID":"a29a1431-e083-4929-9845-12e4b10baea8","Type":"ContainerDied","Data":"83b2a9f63114bfb5d8149fb4dde9b375761226abd1a77cf38df33e2742d9bffb"} Sep 29 14:19:55 crc kubenswrapper[4634]: I0929 14:19:55.584895 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkr6" event={"ID":"a29a1431-e083-4929-9845-12e4b10baea8","Type":"ContainerStarted","Data":"711643656d360dab36de0ec9b404a674911f76fb1d3859d1e201a43011b90545"} Sep 29 14:19:55 crc kubenswrapper[4634]: I0929 14:19:55.612760 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fqkr6" podStartSLOduration=3.11564569 
podStartE2EDuration="5.612740764s" podCreationTimestamp="2025-09-29 14:19:50 +0000 UTC" firstStartedPulling="2025-09-29 14:19:51.524685582 +0000 UTC m=+2122.093413331" lastFinishedPulling="2025-09-29 14:19:54.021780606 +0000 UTC m=+2124.590508405" observedRunningTime="2025-09-29 14:19:55.606614289 +0000 UTC m=+2126.175342038" watchObservedRunningTime="2025-09-29 14:19:55.612740764 +0000 UTC m=+2126.181468513" Sep 29 14:20:00 crc kubenswrapper[4634]: I0929 14:20:00.753931 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:20:00 crc kubenswrapper[4634]: I0929 14:20:00.754610 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:20:00 crc kubenswrapper[4634]: I0929 14:20:00.805991 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:20:01 crc kubenswrapper[4634]: I0929 14:20:01.707988 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:20:01 crc kubenswrapper[4634]: I0929 14:20:01.811494 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkr6"] Sep 29 14:20:03 crc kubenswrapper[4634]: I0929 14:20:03.670436 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fqkr6" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="registry-server" containerID="cri-o://711643656d360dab36de0ec9b404a674911f76fb1d3859d1e201a43011b90545" gracePeriod=2 Sep 29 14:20:07 crc kubenswrapper[4634]: I0929 14:20:07.713832 4634 generic.go:334] "Generic (PLEG): container finished" podID="a29a1431-e083-4929-9845-12e4b10baea8" containerID="711643656d360dab36de0ec9b404a674911f76fb1d3859d1e201a43011b90545" exitCode=0 Sep 29 14:20:07 crc kubenswrapper[4634]: I0929 14:20:07.714718 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkr6" event={"ID":"a29a1431-e083-4929-9845-12e4b10baea8","Type":"ContainerDied","Data":"711643656d360dab36de0ec9b404a674911f76fb1d3859d1e201a43011b90545"} Sep 29 14:20:07 crc kubenswrapper[4634]: I0929 14:20:07.958992 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.121205 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-utilities\") pod \"a29a1431-e083-4929-9845-12e4b10baea8\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.121274 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6ln8\" (UniqueName: \"kubernetes.io/projected/a29a1431-e083-4929-9845-12e4b10baea8-kube-api-access-c6ln8\") pod \"a29a1431-e083-4929-9845-12e4b10baea8\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.121310 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-catalog-content\") pod \"a29a1431-e083-4929-9845-12e4b10baea8\" (UID: \"a29a1431-e083-4929-9845-12e4b10baea8\") " Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.124318 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-utilities" (OuterVolumeSpecName: "utilities") pod "a29a1431-e083-4929-9845-12e4b10baea8" (UID: "a29a1431-e083-4929-9845-12e4b10baea8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.141481 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29a1431-e083-4929-9845-12e4b10baea8-kube-api-access-c6ln8" (OuterVolumeSpecName: "kube-api-access-c6ln8") pod "a29a1431-e083-4929-9845-12e4b10baea8" (UID: "a29a1431-e083-4929-9845-12e4b10baea8"). InnerVolumeSpecName "kube-api-access-c6ln8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.159242 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a29a1431-e083-4929-9845-12e4b10baea8" (UID: "a29a1431-e083-4929-9845-12e4b10baea8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.223706 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.223747 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6ln8\" (UniqueName: \"kubernetes.io/projected/a29a1431-e083-4929-9845-12e4b10baea8-kube-api-access-c6ln8\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.223757 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a29a1431-e083-4929-9845-12e4b10baea8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.729351 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fqkr6" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.729337 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fqkr6" event={"ID":"a29a1431-e083-4929-9845-12e4b10baea8","Type":"ContainerDied","Data":"f6c483d56ba84b1c9a5df1108e6990ddf2b0e7cf651a4ff1a1fccc4b38f38340"} Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.730444 4634 scope.go:117] "RemoveContainer" containerID="711643656d360dab36de0ec9b404a674911f76fb1d3859d1e201a43011b90545" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.753544 4634 scope.go:117] "RemoveContainer" containerID="83b2a9f63114bfb5d8149fb4dde9b375761226abd1a77cf38df33e2742d9bffb" Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.771963 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkr6"] Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.785928 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fqkr6"] Sep 29 14:20:08 crc kubenswrapper[4634]: I0929 14:20:08.788941 4634 scope.go:117] "RemoveContainer" containerID="10e17010f116d7a8ab12bdece301811421da1dd7f70faafd76f11941f667b74b" Sep 29 14:20:10 crc kubenswrapper[4634]: I0929 14:20:10.120710 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29a1431-e083-4929-9845-12e4b10baea8" path="/var/lib/kubelet/pods/a29a1431-e083-4929-9845-12e4b10baea8/volumes" Sep 29 14:20:23 crc kubenswrapper[4634]: I0929 14:20:23.860267 4634 generic.go:334] "Generic (PLEG): container finished" podID="c2fcbf70-369d-41cd-8187-7e26848b9171" containerID="00680b619cc55944a10a6530db64e4072c97443011babe70a9e6278dca8ccc9a" exitCode=0 Sep 29 14:20:23 crc kubenswrapper[4634]: I0929 14:20:23.860369 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" event={"ID":"c2fcbf70-369d-41cd-8187-7e26848b9171","Type":"ContainerDied","Data":"00680b619cc55944a10a6530db64e4072c97443011babe70a9e6278dca8ccc9a"} Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.403703 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.598782 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-inventory\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.598842 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62jwl\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-kube-api-access-62jwl\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.598897 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.598949 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-repo-setup-combined-ca-bundle\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.598987 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-telemetry-combined-ca-bundle\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.599102 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-libvirt-combined-ca-bundle\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.599138 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-nova-combined-ca-bundle\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.599171 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-ovn-default-certs-0\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.599204 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-neutron-metadata-combined-ca-bundle\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 
crc kubenswrapper[4634]: I0929 14:20:25.599247 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ovn-combined-ca-bundle\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.599291 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-bootstrap-combined-ca-bundle\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.602132 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.602207 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ssh-key\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.602230 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"c2fcbf70-369d-41cd-8187-7e26848b9171\" (UID: \"c2fcbf70-369d-41cd-8187-7e26848b9171\") " Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.605635 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.606946 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.607888 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-kube-api-access-62jwl" (OuterVolumeSpecName: "kube-api-access-62jwl") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "kube-api-access-62jwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.608952 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.609991 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.611307 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.614072 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.614164 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.614275 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.615542 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.624475 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.626000 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.636460 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-inventory" (OuterVolumeSpecName: "inventory") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.651803 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c2fcbf70-369d-41cd-8187-7e26848b9171" (UID: "c2fcbf70-369d-41cd-8187-7e26848b9171"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.704932 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62jwl\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-kube-api-access-62jwl\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.704972 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.704986 4634 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705000 4634 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705012 4634 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705027 4634 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705038 4634 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705049 4634 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705063 4634 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705075 4634 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705101 4634 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705114 4634 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705126 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2fcbf70-369d-41cd-8187-7e26848b9171-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.705137 4634 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/c2fcbf70-369d-41cd-8187-7e26848b9171-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.876478 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" event={"ID":"c2fcbf70-369d-41cd-8187-7e26848b9171","Type":"ContainerDied","Data":"ae88494d3e3f030df6735b45a0973cf920653c45ed9d60927837fed10123e920"} Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.876519 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae88494d3e3f030df6735b45a0973cf920653c45ed9d60927837fed10123e920" Sep 29 14:20:25 crc kubenswrapper[4634]: I0929 14:20:25.876595 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.015187 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49"] Sep 29 14:20:26 crc kubenswrapper[4634]: E0929 14:20:26.015605 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="extract-content" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.015623 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="extract-content" Sep 29 14:20:26 crc kubenswrapper[4634]: E0929 14:20:26.015649 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="registry-server" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.015657 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="registry-server" Sep 29 14:20:26 crc kubenswrapper[4634]: E0929 14:20:26.015682 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="extract-utilities" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.015688 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="extract-utilities" Sep 29 14:20:26 crc kubenswrapper[4634]: E0929 14:20:26.015702 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2fcbf70-369d-41cd-8187-7e26848b9171" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.015708 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2fcbf70-369d-41cd-8187-7e26848b9171" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.015879 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29a1431-e083-4929-9845-12e4b10baea8" containerName="registry-server" Sep 29 14:20:26 
crc kubenswrapper[4634]: I0929 14:20:26.015893 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2fcbf70-369d-41cd-8187-7e26848b9171" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.016517 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.018860 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.018947 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.019329 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.019630 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.020878 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.033655 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49"] Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.112821 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.112908 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.112940 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rg2v\" (UniqueName: \"kubernetes.io/projected/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-kube-api-access-2rg2v\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.112974 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.113146 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ssh-key\") pod 
\"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.214987 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.215149 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.215189 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rg2v\" (UniqueName: \"kubernetes.io/projected/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-kube-api-access-2rg2v\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.215230 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.215300 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.216218 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.220759 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.223624 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 
14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.225685 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.234831 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rg2v\" (UniqueName: \"kubernetes.io/projected/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-kube-api-access-2rg2v\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ztr49\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.370466 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:20:26 crc kubenswrapper[4634]: I0929 14:20:26.996186 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49"] Sep 29 14:20:27 crc kubenswrapper[4634]: I0929 14:20:27.894879 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" event={"ID":"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41","Type":"ContainerStarted","Data":"f85f1e52246c266f286b90b39e8a8e9ea5c9c5d07d5e2f2ab06b8660258bc50e"} Sep 29 14:20:27 crc kubenswrapper[4634]: I0929 14:20:27.895248 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" event={"ID":"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41","Type":"ContainerStarted","Data":"a0b9739e33c1279ab1cf80e4961523435cb653e2d329f0c7bfd05167bf0d749b"} Sep 29 14:20:27 crc kubenswrapper[4634]: I0929 14:20:27.922456 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" podStartSLOduration=2.7370625090000003 podStartE2EDuration="2.922438093s" podCreationTimestamp="2025-09-29 14:20:25 +0000 UTC" firstStartedPulling="2025-09-29 14:20:27.007396732 +0000 UTC m=+2157.576124481" lastFinishedPulling="2025-09-29 14:20:27.192772316 +0000 UTC m=+2157.761500065" observedRunningTime="2025-09-29 14:20:27.919358415 +0000 UTC m=+2158.488086164" watchObservedRunningTime="2025-09-29 14:20:27.922438093 +0000 UTC m=+2158.491165832" Sep 29 14:20:44 crc kubenswrapper[4634]: I0929 14:20:44.396581 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:20:44 crc kubenswrapper[4634]: I0929 14:20:44.397276 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:21:14 crc kubenswrapper[4634]: I0929 14:21:14.396923 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:21:14 crc kubenswrapper[4634]: I0929 14:21:14.398346 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:21:37 crc kubenswrapper[4634]: I0929 14:21:37.623853 4634 generic.go:334] "Generic (PLEG): container finished" podID="4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" containerID="f85f1e52246c266f286b90b39e8a8e9ea5c9c5d07d5e2f2ab06b8660258bc50e" exitCode=0 Sep 29 14:21:37 crc kubenswrapper[4634]: I0929 14:21:37.624021 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" event={"ID":"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41","Type":"ContainerDied","Data":"f85f1e52246c266f286b90b39e8a8e9ea5c9c5d07d5e2f2ab06b8660258bc50e"} Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.089179 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.171459 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovncontroller-config-0\") pod \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.171575 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-inventory\") pod \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.171624 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ssh-key\") pod \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.171658 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovn-combined-ca-bundle\") pod \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.171759 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rg2v\" (UniqueName: \"kubernetes.io/projected/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-kube-api-access-2rg2v\") pod \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\" (UID: \"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41\") " Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.178754 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" (UID: "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.182303 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-kube-api-access-2rg2v" (OuterVolumeSpecName: "kube-api-access-2rg2v") pod "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" (UID: "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41"). InnerVolumeSpecName "kube-api-access-2rg2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.209060 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" (UID: "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.209101 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-inventory" (OuterVolumeSpecName: "inventory") pod "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" (UID: "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.209528 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" (UID: "4ffd6a52-c5fb-4796-b98b-c5ca2a238a41"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.273922 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.273965 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.273979 4634 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.274015 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rg2v\" (UniqueName: \"kubernetes.io/projected/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-kube-api-access-2rg2v\") on node \"crc\" DevicePath \"\"" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.274028 4634 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/4ffd6a52-c5fb-4796-b98b-c5ca2a238a41-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.654115 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" event={"ID":"4ffd6a52-c5fb-4796-b98b-c5ca2a238a41","Type":"ContainerDied","Data":"a0b9739e33c1279ab1cf80e4961523435cb653e2d329f0c7bfd05167bf0d749b"} Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 
14:21:39.654817 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0b9739e33c1279ab1cf80e4961523435cb653e2d329f0c7bfd05167bf0d749b" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.654266 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ztr49" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.780909 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg"] Sep 29 14:21:39 crc kubenswrapper[4634]: E0929 14:21:39.781494 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.781519 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.781810 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ffd6a52-c5fb-4796-b98b-c5ca2a238a41" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.782572 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.785171 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.786225 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.787067 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.787408 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.787686 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.791887 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.797723 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg"] Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.889285 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.889363 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-ssh-key\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.889515 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.889541 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h842p\" (UniqueName: \"kubernetes.io/projected/b183018d-383a-4d89-bb1f-d5c1f13404a9-kube-api-access-h842p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.889607 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.889673 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.991882 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.991930 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h842p\" (UniqueName: \"kubernetes.io/projected/b183018d-383a-4d89-bb1f-d5c1f13404a9-kube-api-access-h842p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.991995 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc 
kubenswrapper[4634]: I0929 14:21:39.992029 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.992094 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:39 crc kubenswrapper[4634]: I0929 14:21:39.992129 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.008234 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.009469 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.009539 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.010771 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.012750 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h842p\" (UniqueName: \"kubernetes.io/projected/b183018d-383a-4d89-bb1f-d5c1f13404a9-kube-api-access-h842p\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: 
\"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.022956 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.116422 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:21:40 crc kubenswrapper[4634]: I0929 14:21:40.663848 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg"] Sep 29 14:21:41 crc kubenswrapper[4634]: I0929 14:21:41.679036 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" event={"ID":"b183018d-383a-4d89-bb1f-d5c1f13404a9","Type":"ContainerStarted","Data":"ea063a3e27b434c7b17f41593a0f136409c8aee1eecf7363c08b326ae1da6e0b"} Sep 29 14:21:41 crc kubenswrapper[4634]: I0929 14:21:41.680814 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" event={"ID":"b183018d-383a-4d89-bb1f-d5c1f13404a9","Type":"ContainerStarted","Data":"5557683ca6aef797700f0dfe308cbddb216fb5157217133efeec770c4b164a1c"} Sep 29 14:21:41 crc kubenswrapper[4634]: I0929 14:21:41.699408 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" podStartSLOduration=2.55757957 podStartE2EDuration="2.699384484s" podCreationTimestamp="2025-09-29 14:21:39 +0000 UTC" firstStartedPulling="2025-09-29 14:21:40.670405024 +0000 UTC m=+2231.239132773" lastFinishedPulling="2025-09-29 14:21:40.812209928 +0000 UTC m=+2231.380937687" observedRunningTime="2025-09-29 14:21:41.698005836 +0000 UTC m=+2232.266733625" watchObservedRunningTime="2025-09-29 14:21:41.699384484 +0000 UTC m=+2232.268112253" Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.395931 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.396239 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.396281 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.397028 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.397079 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" gracePeriod=600 Sep 29 14:21:44 crc kubenswrapper[4634]: E0929 14:21:44.518541 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.710359 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" exitCode=0 Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.710408 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423"} Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.710445 4634 scope.go:117] "RemoveContainer" containerID="4b3f11a3b532b31a5f39dc3954037054fe5968d53a04cc1b756a9b0a70e96890" Sep 29 14:21:44 crc kubenswrapper[4634]: I0929 14:21:44.711203 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:21:44 crc kubenswrapper[4634]: E0929 14:21:44.711603 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:21:59 crc kubenswrapper[4634]: I0929 14:21:59.110768 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:21:59 crc kubenswrapper[4634]: E0929 14:21:59.111956 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:22:11 crc kubenswrapper[4634]: I0929 14:22:11.110812 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:22:11 crc kubenswrapper[4634]: E0929 14:22:11.111861 4634 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:22:26 crc kubenswrapper[4634]: I0929 14:22:26.110247 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:22:26 crc kubenswrapper[4634]: E0929 14:22:26.111104 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:22:34 crc kubenswrapper[4634]: I0929 14:22:34.187320 4634 generic.go:334] "Generic (PLEG): container finished" podID="b183018d-383a-4d89-bb1f-d5c1f13404a9" containerID="ea063a3e27b434c7b17f41593a0f136409c8aee1eecf7363c08b326ae1da6e0b" exitCode=0 Sep 29 14:22:34 crc kubenswrapper[4634]: I0929 14:22:34.187414 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" event={"ID":"b183018d-383a-4d89-bb1f-d5c1f13404a9","Type":"ContainerDied","Data":"ea063a3e27b434c7b17f41593a0f136409c8aee1eecf7363c08b326ae1da6e0b"} Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.683776 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.840355 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h842p\" (UniqueName: \"kubernetes.io/projected/b183018d-383a-4d89-bb1f-d5c1f13404a9-kube-api-access-h842p\") pod \"b183018d-383a-4d89-bb1f-d5c1f13404a9\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.840879 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-metadata-combined-ca-bundle\") pod \"b183018d-383a-4d89-bb1f-d5c1f13404a9\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.841054 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-inventory\") pod \"b183018d-383a-4d89-bb1f-d5c1f13404a9\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.841188 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-ssh-key\") pod \"b183018d-383a-4d89-bb1f-d5c1f13404a9\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.841297 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-nova-metadata-neutron-config-0\") pod \"b183018d-383a-4d89-bb1f-d5c1f13404a9\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.841412 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"b183018d-383a-4d89-bb1f-d5c1f13404a9\" (UID: \"b183018d-383a-4d89-bb1f-d5c1f13404a9\") " Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.848290 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b183018d-383a-4d89-bb1f-d5c1f13404a9" (UID: "b183018d-383a-4d89-bb1f-d5c1f13404a9"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.848354 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b183018d-383a-4d89-bb1f-d5c1f13404a9-kube-api-access-h842p" (OuterVolumeSpecName: "kube-api-access-h842p") pod "b183018d-383a-4d89-bb1f-d5c1f13404a9" (UID: "b183018d-383a-4d89-bb1f-d5c1f13404a9"). InnerVolumeSpecName "kube-api-access-h842p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.874646 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-inventory" (OuterVolumeSpecName: "inventory") pod "b183018d-383a-4d89-bb1f-d5c1f13404a9" (UID: "b183018d-383a-4d89-bb1f-d5c1f13404a9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.876435 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "b183018d-383a-4d89-bb1f-d5c1f13404a9" (UID: "b183018d-383a-4d89-bb1f-d5c1f13404a9"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.878214 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "b183018d-383a-4d89-bb1f-d5c1f13404a9" (UID: "b183018d-383a-4d89-bb1f-d5c1f13404a9"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.883475 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b183018d-383a-4d89-bb1f-d5c1f13404a9" (UID: "b183018d-383a-4d89-bb1f-d5c1f13404a9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.944961 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h842p\" (UniqueName: \"kubernetes.io/projected/b183018d-383a-4d89-bb1f-d5c1f13404a9-kube-api-access-h842p\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.945009 4634 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.945024 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.945039 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.945052 4634 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:35 crc kubenswrapper[4634]: I0929 14:22:35.945070 4634 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b183018d-383a-4d89-bb1f-d5c1f13404a9-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.206860 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" event={"ID":"b183018d-383a-4d89-bb1f-d5c1f13404a9","Type":"ContainerDied","Data":"5557683ca6aef797700f0dfe308cbddb216fb5157217133efeec770c4b164a1c"} Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.206914 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5557683ca6aef797700f0dfe308cbddb216fb5157217133efeec770c4b164a1c" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.206963 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.299574 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs"] Sep 29 14:22:36 crc kubenswrapper[4634]: E0929 14:22:36.299982 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b183018d-383a-4d89-bb1f-d5c1f13404a9" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.299997 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b183018d-383a-4d89-bb1f-d5c1f13404a9" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.300200 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b183018d-383a-4d89-bb1f-d5c1f13404a9" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.300850 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.302724 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.302724 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.305242 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.305343 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.311783 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs"] Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.314038 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.454105 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.454174 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.454197 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.454229 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgvb4\" (UniqueName: \"kubernetes.io/projected/a38e3e32-cd47-4afd-aa38-da7911b1a12f-kube-api-access-qgvb4\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.454318 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.555575 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.555645 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.555699 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.555719 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.555756 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgvb4\" (UniqueName: \"kubernetes.io/projected/a38e3e32-cd47-4afd-aa38-da7911b1a12f-kube-api-access-qgvb4\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.559824 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.559928 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.560967 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.561995 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-secret-0\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.579552 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgvb4\" (UniqueName: \"kubernetes.io/projected/a38e3e32-cd47-4afd-aa38-da7911b1a12f-kube-api-access-qgvb4\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:36 crc kubenswrapper[4634]: I0929 14:22:36.617092 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:22:37 crc kubenswrapper[4634]: I0929 14:22:37.169549 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs"] Sep 29 14:22:37 crc kubenswrapper[4634]: W0929 14:22:37.190005 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda38e3e32_cd47_4afd_aa38_da7911b1a12f.slice/crio-cd6cad4b72bceb7e38c55a7847a3ce53bc571660ebb94692e56f255461b0849d WatchSource:0}: Error finding container cd6cad4b72bceb7e38c55a7847a3ce53bc571660ebb94692e56f255461b0849d: Status 404 returned error can't find the container with id cd6cad4b72bceb7e38c55a7847a3ce53bc571660ebb94692e56f255461b0849d Sep 29 14:22:37 crc kubenswrapper[4634]: I0929 14:22:37.221550 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" event={"ID":"a38e3e32-cd47-4afd-aa38-da7911b1a12f","Type":"ContainerStarted","Data":"cd6cad4b72bceb7e38c55a7847a3ce53bc571660ebb94692e56f255461b0849d"} Sep 29 14:22:38 crc kubenswrapper[4634]: I0929 14:22:38.231196 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" event={"ID":"a38e3e32-cd47-4afd-aa38-da7911b1a12f","Type":"ContainerStarted","Data":"8ade2fea2dc9eadfa722048ce5ba1239a6d11af1501b4619c25d802191f514d2"} Sep 29 14:22:38 crc kubenswrapper[4634]: I0929 14:22:38.251898 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" podStartSLOduration=2.079105034 podStartE2EDuration="2.251876672s" podCreationTimestamp="2025-09-29 14:22:36 +0000 UTC" firstStartedPulling="2025-09-29 14:22:37.194800758 +0000 UTC m=+2287.763528517" lastFinishedPulling="2025-09-29 14:22:37.367572406 +0000 UTC m=+2287.936300155" observedRunningTime="2025-09-29 14:22:38.247384446 +0000 UTC m=+2288.816112195" watchObservedRunningTime="2025-09-29 14:22:38.251876672 +0000 UTC m=+2288.820604421" Sep 29 14:22:39 crc kubenswrapper[4634]: I0929 14:22:39.111331 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:22:39 crc kubenswrapper[4634]: E0929 14:22:39.111527 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:22:50 crc 
kubenswrapper[4634]: I0929 14:22:50.115542 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:22:50 crc kubenswrapper[4634]: E0929 14:22:50.117782 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:23:02 crc kubenswrapper[4634]: I0929 14:23:02.111213 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:23:02 crc kubenswrapper[4634]: E0929 14:23:02.114294 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:23:15 crc kubenswrapper[4634]: I0929 14:23:15.110921 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:23:15 crc kubenswrapper[4634]: E0929 14:23:15.112877 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:23:30 crc kubenswrapper[4634]: I0929 14:23:30.110688 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:23:30 crc kubenswrapper[4634]: E0929 14:23:30.111539 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:23:43 crc kubenswrapper[4634]: I0929 14:23:43.110681 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:23:43 crc kubenswrapper[4634]: E0929 14:23:43.111380 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:23:58 crc kubenswrapper[4634]: I0929 14:23:58.110304 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:23:58 crc 
kubenswrapper[4634]: E0929 14:23:58.111276 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:24:11 crc kubenswrapper[4634]: I0929 14:24:11.110963 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:24:11 crc kubenswrapper[4634]: E0929 14:24:11.112201 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:24:24 crc kubenswrapper[4634]: I0929 14:24:24.110837 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:24:24 crc kubenswrapper[4634]: E0929 14:24:24.111837 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:24:35 crc kubenswrapper[4634]: I0929 14:24:35.118405 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:24:35 crc kubenswrapper[4634]: E0929 14:24:35.120010 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:24:47 crc kubenswrapper[4634]: I0929 14:24:47.110321 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:24:47 crc kubenswrapper[4634]: E0929 14:24:47.112964 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:24:58 crc kubenswrapper[4634]: I0929 14:24:58.112115 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:24:58 crc kubenswrapper[4634]: E0929 14:24:58.114695 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:25:13 crc kubenswrapper[4634]: I0929 14:25:13.125655 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:25:13 crc kubenswrapper[4634]: E0929 14:25:13.127065 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:25:25 crc kubenswrapper[4634]: I0929 14:25:25.112195 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:25:25 crc kubenswrapper[4634]: E0929 14:25:25.112853 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:25:36 crc kubenswrapper[4634]: I0929 14:25:36.110626 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:25:36 crc kubenswrapper[4634]: E0929 14:25:36.111418 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:25:50 crc kubenswrapper[4634]: I0929 14:25:50.118734 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:25:50 crc kubenswrapper[4634]: E0929 14:25:50.120046 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:26:05 crc kubenswrapper[4634]: I0929 14:26:05.111410 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:26:05 crc kubenswrapper[4634]: E0929 14:26:05.112466 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:26:19 crc kubenswrapper[4634]: I0929 14:26:19.111330 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:26:19 crc kubenswrapper[4634]: E0929 14:26:19.113040 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:26:33 crc kubenswrapper[4634]: I0929 14:26:33.110785 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:26:33 crc kubenswrapper[4634]: E0929 14:26:33.111728 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:26:48 crc kubenswrapper[4634]: I0929 14:26:48.109978 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:26:48 crc kubenswrapper[4634]: I0929 14:26:48.773274 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"531b311522f5a8d0b24fa9743853f85282837cdcd078122b14beb225d3ea508a"} Sep 29 14:27:21 crc kubenswrapper[4634]: I0929 14:27:21.098748 4634 generic.go:334] "Generic (PLEG): container finished" podID="a38e3e32-cd47-4afd-aa38-da7911b1a12f" containerID="8ade2fea2dc9eadfa722048ce5ba1239a6d11af1501b4619c25d802191f514d2" exitCode=0 Sep 29 14:27:21 crc kubenswrapper[4634]: I0929 14:27:21.098834 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" event={"ID":"a38e3e32-cd47-4afd-aa38-da7911b1a12f","Type":"ContainerDied","Data":"8ade2fea2dc9eadfa722048ce5ba1239a6d11af1501b4619c25d802191f514d2"} Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.597419 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.732773 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-inventory\") pod \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.733299 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-ssh-key\") pod \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.733484 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgvb4\" (UniqueName: \"kubernetes.io/projected/a38e3e32-cd47-4afd-aa38-da7911b1a12f-kube-api-access-qgvb4\") pod \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.734298 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-secret-0\") pod \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.734405 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-combined-ca-bundle\") pod \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\" (UID: \"a38e3e32-cd47-4afd-aa38-da7911b1a12f\") " Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.739970 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a38e3e32-cd47-4afd-aa38-da7911b1a12f-kube-api-access-qgvb4" (OuterVolumeSpecName: "kube-api-access-qgvb4") pod "a38e3e32-cd47-4afd-aa38-da7911b1a12f" (UID: "a38e3e32-cd47-4afd-aa38-da7911b1a12f"). InnerVolumeSpecName "kube-api-access-qgvb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.740806 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a38e3e32-cd47-4afd-aa38-da7911b1a12f" (UID: "a38e3e32-cd47-4afd-aa38-da7911b1a12f"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.768135 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-inventory" (OuterVolumeSpecName: "inventory") pod "a38e3e32-cd47-4afd-aa38-da7911b1a12f" (UID: "a38e3e32-cd47-4afd-aa38-da7911b1a12f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.792338 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a38e3e32-cd47-4afd-aa38-da7911b1a12f" (UID: "a38e3e32-cd47-4afd-aa38-da7911b1a12f"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.804498 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "a38e3e32-cd47-4afd-aa38-da7911b1a12f" (UID: "a38e3e32-cd47-4afd-aa38-da7911b1a12f"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.837068 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.837363 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.837440 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgvb4\" (UniqueName: \"kubernetes.io/projected/a38e3e32-cd47-4afd-aa38-da7911b1a12f-kube-api-access-qgvb4\") on node \"crc\" DevicePath \"\"" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.837514 4634 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:27:22 crc kubenswrapper[4634]: I0929 14:27:22.837588 4634 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a38e3e32-cd47-4afd-aa38-da7911b1a12f-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.119006 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" event={"ID":"a38e3e32-cd47-4afd-aa38-da7911b1a12f","Type":"ContainerDied","Data":"cd6cad4b72bceb7e38c55a7847a3ce53bc571660ebb94692e56f255461b0849d"} Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.119063 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd6cad4b72bceb7e38c55a7847a3ce53bc571660ebb94692e56f255461b0849d" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.119206 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.322294 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd"] Sep 29 14:27:23 crc kubenswrapper[4634]: E0929 14:27:23.322752 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a38e3e32-cd47-4afd-aa38-da7911b1a12f" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.322768 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a38e3e32-cd47-4afd-aa38-da7911b1a12f" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.322930 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a38e3e32-cd47-4afd-aa38-da7911b1a12f" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.323772 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.328561 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.328591 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.328682 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.328804 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.328882 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.328961 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.333693 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.338170 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd"] Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450226 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450270 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450293 4634 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450322 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450424 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450496 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450548 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450573 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/4dbe661a-c031-4716-9816-d5cb05957a35-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.450612 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzflc\" (UniqueName: \"kubernetes.io/projected/4dbe661a-c031-4716-9816-d5cb05957a35-kube-api-access-nzflc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.553766 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 
crc kubenswrapper[4634]: I0929 14:27:23.553876 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.553916 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.553973 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.554055 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.554165 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.554242 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.554324 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/4dbe661a-c031-4716-9816-d5cb05957a35-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.554393 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzflc\" (UniqueName: \"kubernetes.io/projected/4dbe661a-c031-4716-9816-d5cb05957a35-kube-api-access-nzflc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.558348 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.558581 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/4dbe661a-c031-4716-9816-d5cb05957a35-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.559881 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.560516 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.561130 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.562902 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.562970 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.568580 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.574412 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzflc\" (UniqueName: 
\"kubernetes.io/projected/4dbe661a-c031-4716-9816-d5cb05957a35-kube-api-access-nzflc\") pod \"nova-edpm-deployment-openstack-edpm-ipam-x58xd\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:23 crc kubenswrapper[4634]: I0929 14:27:23.652041 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:27:24 crc kubenswrapper[4634]: I0929 14:27:24.177166 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd"] Sep 29 14:27:24 crc kubenswrapper[4634]: I0929 14:27:24.183601 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:27:25 crc kubenswrapper[4634]: I0929 14:27:25.142995 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" event={"ID":"4dbe661a-c031-4716-9816-d5cb05957a35","Type":"ContainerStarted","Data":"7b2226a642d53ff87fc06b471df82e79e199adb5fd9cf14c6f3ff8947aa19b4f"} Sep 29 14:27:26 crc kubenswrapper[4634]: I0929 14:27:26.155593 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" event={"ID":"4dbe661a-c031-4716-9816-d5cb05957a35","Type":"ContainerStarted","Data":"fa1aaed74f30bb4af32197031b4ee1fcc4869dd0f651fe045e721fb01a653f3f"} Sep 29 14:27:26 crc kubenswrapper[4634]: I0929 14:27:26.186387 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" podStartSLOduration=2.32895116 podStartE2EDuration="3.186365648s" podCreationTimestamp="2025-09-29 14:27:23 +0000 UTC" firstStartedPulling="2025-09-29 14:27:24.183397736 +0000 UTC m=+2574.752125485" lastFinishedPulling="2025-09-29 14:27:25.040812214 +0000 UTC m=+2575.609539973" observedRunningTime="2025-09-29 14:27:26.176193235 +0000 UTC m=+2576.744921004" watchObservedRunningTime="2025-09-29 14:27:26.186365648 +0000 UTC m=+2576.755093397" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.248699 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f6prs"] Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.251594 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.277193 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6prs"] Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.348553 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-utilities\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.348666 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-catalog-content\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.348717 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m92cr\" (UniqueName: \"kubernetes.io/projected/337e0c99-c200-4b40-af17-cd0aa6afb142-kube-api-access-m92cr\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.450637 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-catalog-content\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.450720 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m92cr\" (UniqueName: \"kubernetes.io/projected/337e0c99-c200-4b40-af17-cd0aa6afb142-kube-api-access-m92cr\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.450789 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-utilities\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.451398 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-catalog-content\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.451418 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-utilities\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.471944 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-m92cr\" (UniqueName: \"kubernetes.io/projected/337e0c99-c200-4b40-af17-cd0aa6afb142-kube-api-access-m92cr\") pod \"community-operators-f6prs\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:08 crc kubenswrapper[4634]: I0929 14:29:08.573613 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:09 crc kubenswrapper[4634]: I0929 14:29:09.198313 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6prs"] Sep 29 14:29:09 crc kubenswrapper[4634]: I0929 14:29:09.297321 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6prs" event={"ID":"337e0c99-c200-4b40-af17-cd0aa6afb142","Type":"ContainerStarted","Data":"e4c3b72a3c3645d7e7383a74a92207d95e079c10b9ee07442df7c09aa8484d00"} Sep 29 14:29:10 crc kubenswrapper[4634]: I0929 14:29:10.306499 4634 generic.go:334] "Generic (PLEG): container finished" podID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerID="3cd08677bc71cae6f48e5128da0a59f7ffc8475eb303425ae314b781b629d88d" exitCode=0 Sep 29 14:29:10 crc kubenswrapper[4634]: I0929 14:29:10.306612 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6prs" event={"ID":"337e0c99-c200-4b40-af17-cd0aa6afb142","Type":"ContainerDied","Data":"3cd08677bc71cae6f48e5128da0a59f7ffc8475eb303425ae314b781b629d88d"} Sep 29 14:29:12 crc kubenswrapper[4634]: I0929 14:29:12.327813 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6prs" event={"ID":"337e0c99-c200-4b40-af17-cd0aa6afb142","Type":"ContainerStarted","Data":"95a53188cd1e26a98314f778b2b931c4f9e1d95ea3ffc65bfe4fcf0c66c588f2"} Sep 29 14:29:13 crc kubenswrapper[4634]: I0929 14:29:13.339170 4634 generic.go:334] "Generic (PLEG): container finished" podID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerID="95a53188cd1e26a98314f778b2b931c4f9e1d95ea3ffc65bfe4fcf0c66c588f2" exitCode=0 Sep 29 14:29:13 crc kubenswrapper[4634]: I0929 14:29:13.339478 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6prs" event={"ID":"337e0c99-c200-4b40-af17-cd0aa6afb142","Type":"ContainerDied","Data":"95a53188cd1e26a98314f778b2b931c4f9e1d95ea3ffc65bfe4fcf0c66c588f2"} Sep 29 14:29:14 crc kubenswrapper[4634]: I0929 14:29:14.356208 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6prs" event={"ID":"337e0c99-c200-4b40-af17-cd0aa6afb142","Type":"ContainerStarted","Data":"5a715e8b4d1c1077487c37b91827f454ee219ad33143a98d47db292e235f06c4"} Sep 29 14:29:14 crc kubenswrapper[4634]: I0929 14:29:14.384660 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f6prs" podStartSLOduration=2.888867378 podStartE2EDuration="6.384638412s" podCreationTimestamp="2025-09-29 14:29:08 +0000 UTC" firstStartedPulling="2025-09-29 14:29:10.310004223 +0000 UTC m=+2680.878731972" lastFinishedPulling="2025-09-29 14:29:13.805775247 +0000 UTC m=+2684.374503006" observedRunningTime="2025-09-29 14:29:14.377440144 +0000 UTC m=+2684.946167893" watchObservedRunningTime="2025-09-29 14:29:14.384638412 +0000 UTC m=+2684.953366171" Sep 29 14:29:14 crc kubenswrapper[4634]: I0929 14:29:14.395905 4634 patch_prober.go:28] interesting 
pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:29:14 crc kubenswrapper[4634]: I0929 14:29:14.395977 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:29:18 crc kubenswrapper[4634]: I0929 14:29:18.574132 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f6prs"
Sep 29 14:29:18 crc kubenswrapper[4634]: I0929 14:29:18.574502 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f6prs"
Sep 29 14:29:18 crc kubenswrapper[4634]: I0929 14:29:18.631908 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f6prs"
Sep 29 14:29:19 crc kubenswrapper[4634]: I0929 14:29:19.444755 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f6prs"
Sep 29 14:29:19 crc kubenswrapper[4634]: I0929 14:29:19.493177 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6prs"]
Sep 29 14:29:21 crc kubenswrapper[4634]: I0929 14:29:21.414128 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f6prs" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="registry-server" containerID="cri-o://5a715e8b4d1c1077487c37b91827f454ee219ad33143a98d47db292e235f06c4" gracePeriod=2
Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.428229 4634 generic.go:334] "Generic (PLEG): container finished" podID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerID="5a715e8b4d1c1077487c37b91827f454ee219ad33143a98d47db292e235f06c4" exitCode=0
Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.428381 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6prs" event={"ID":"337e0c99-c200-4b40-af17-cd0aa6afb142","Type":"ContainerDied","Data":"5a715e8b4d1c1077487c37b91827f454ee219ad33143a98d47db292e235f06c4"}
Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.428583 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6prs" event={"ID":"337e0c99-c200-4b40-af17-cd0aa6afb142","Type":"ContainerDied","Data":"e4c3b72a3c3645d7e7383a74a92207d95e079c10b9ee07442df7c09aa8484d00"}
Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.428600 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4c3b72a3c3645d7e7383a74a92207d95e079c10b9ee07442df7c09aa8484d00"
Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.463491 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6prs"
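
The failing probe above is an HTTP-GET liveness check: the kubelet dials the pod's health endpoint, and a refused connection (nothing listening on 127.0.0.1:8798) counts as a failure, just as a non-2xx/3xx status would. A minimal stand-in for that check, reusing the URL from the log (a sketch, not the kubelet's actual prober code):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // probe returns nil when the endpoint answers with a 2xx/3xx status,
    // which is the success rule HTTP probes follow.
    func probe(url string) error {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
        }
        defer resp.Body.Close()
        if resp.StatusCode < 200 || resp.StatusCode >= 400 {
            return fmt.Errorf("unhealthy status: %s", resp.Status)
        }
        return nil
    }

    func main() {
        if err := probe("http://127.0.0.1:8798/health"); err != nil {
            fmt.Println("Probe failed:", err)
        }
    }
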
Need to start a new one" pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.659988 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-utilities\") pod \"337e0c99-c200-4b40-af17-cd0aa6afb142\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.660129 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m92cr\" (UniqueName: \"kubernetes.io/projected/337e0c99-c200-4b40-af17-cd0aa6afb142-kube-api-access-m92cr\") pod \"337e0c99-c200-4b40-af17-cd0aa6afb142\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.660154 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-catalog-content\") pod \"337e0c99-c200-4b40-af17-cd0aa6afb142\" (UID: \"337e0c99-c200-4b40-af17-cd0aa6afb142\") " Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.660764 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-utilities" (OuterVolumeSpecName: "utilities") pod "337e0c99-c200-4b40-af17-cd0aa6afb142" (UID: "337e0c99-c200-4b40-af17-cd0aa6afb142"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.666275 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/337e0c99-c200-4b40-af17-cd0aa6afb142-kube-api-access-m92cr" (OuterVolumeSpecName: "kube-api-access-m92cr") pod "337e0c99-c200-4b40-af17-cd0aa6afb142" (UID: "337e0c99-c200-4b40-af17-cd0aa6afb142"). InnerVolumeSpecName "kube-api-access-m92cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.708051 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "337e0c99-c200-4b40-af17-cd0aa6afb142" (UID: "337e0c99-c200-4b40-af17-cd0aa6afb142"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.762380 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m92cr\" (UniqueName: \"kubernetes.io/projected/337e0c99-c200-4b40-af17-cd0aa6afb142-kube-api-access-m92cr\") on node \"crc\" DevicePath \"\"" Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.762415 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:29:22 crc kubenswrapper[4634]: I0929 14:29:22.762425 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/337e0c99-c200-4b40-af17-cd0aa6afb142-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:29:23 crc kubenswrapper[4634]: I0929 14:29:23.435346 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f6prs" Sep 29 14:29:23 crc kubenswrapper[4634]: I0929 14:29:23.464197 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6prs"] Sep 29 14:29:23 crc kubenswrapper[4634]: I0929 14:29:23.487067 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f6prs"] Sep 29 14:29:24 crc kubenswrapper[4634]: I0929 14:29:24.120341 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" path="/var/lib/kubelet/pods/337e0c99-c200-4b40-af17-cd0aa6afb142/volumes" Sep 29 14:29:44 crc kubenswrapper[4634]: I0929 14:29:44.396508 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:29:44 crc kubenswrapper[4634]: I0929 14:29:44.398512 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.160844 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6"] Sep 29 14:30:00 crc kubenswrapper[4634]: E0929 14:30:00.161875 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="extract-utilities" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.161889 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="extract-utilities" Sep 29 14:30:00 crc kubenswrapper[4634]: E0929 14:30:00.161912 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="registry-server" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.161918 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="registry-server" Sep 29 14:30:00 crc kubenswrapper[4634]: E0929 14:30:00.161949 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="extract-content" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.161958 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="extract-content" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.162164 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="337e0c99-c200-4b40-af17-cd0aa6afb142" containerName="registry-server" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.162933 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.165579 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.165773 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.170783 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6"] Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.291597 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32692823-592d-42f2-9e32-890eee58f9b7-secret-volume\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.292050 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32692823-592d-42f2-9e32-890eee58f9b7-config-volume\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.292144 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzz9z\" (UniqueName: \"kubernetes.io/projected/32692823-592d-42f2-9e32-890eee58f9b7-kube-api-access-qzz9z\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.393770 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32692823-592d-42f2-9e32-890eee58f9b7-config-volume\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.393868 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzz9z\" (UniqueName: \"kubernetes.io/projected/32692823-592d-42f2-9e32-890eee58f9b7-kube-api-access-qzz9z\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.393942 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32692823-592d-42f2-9e32-890eee58f9b7-secret-volume\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.394657 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32692823-592d-42f2-9e32-890eee58f9b7-config-volume\") pod 
\"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.413177 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32692823-592d-42f2-9e32-890eee58f9b7-secret-volume\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.416846 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzz9z\" (UniqueName: \"kubernetes.io/projected/32692823-592d-42f2-9e32-890eee58f9b7-kube-api-access-qzz9z\") pod \"collect-profiles-29319270-vs2z6\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:00 crc kubenswrapper[4634]: I0929 14:30:00.495386 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:01 crc kubenswrapper[4634]: I0929 14:30:01.005512 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6"] Sep 29 14:30:01 crc kubenswrapper[4634]: I0929 14:30:01.755456 4634 generic.go:334] "Generic (PLEG): container finished" podID="32692823-592d-42f2-9e32-890eee58f9b7" containerID="d4f765e12f59a7b0ae3813b5a2bd75afd38677875dd730075b05f6df886cc401" exitCode=0 Sep 29 14:30:01 crc kubenswrapper[4634]: I0929 14:30:01.755730 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" event={"ID":"32692823-592d-42f2-9e32-890eee58f9b7","Type":"ContainerDied","Data":"d4f765e12f59a7b0ae3813b5a2bd75afd38677875dd730075b05f6df886cc401"} Sep 29 14:30:01 crc kubenswrapper[4634]: I0929 14:30:01.755760 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" event={"ID":"32692823-592d-42f2-9e32-890eee58f9b7","Type":"ContainerStarted","Data":"8106d109c14db31951e7b96c6949c3cdcbfd0ac7502320a1833f2e12c45cb2dd"} Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.139829 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.252543 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32692823-592d-42f2-9e32-890eee58f9b7-config-volume\") pod \"32692823-592d-42f2-9e32-890eee58f9b7\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.252595 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzz9z\" (UniqueName: \"kubernetes.io/projected/32692823-592d-42f2-9e32-890eee58f9b7-kube-api-access-qzz9z\") pod \"32692823-592d-42f2-9e32-890eee58f9b7\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.252826 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32692823-592d-42f2-9e32-890eee58f9b7-secret-volume\") pod \"32692823-592d-42f2-9e32-890eee58f9b7\" (UID: \"32692823-592d-42f2-9e32-890eee58f9b7\") " Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.253834 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32692823-592d-42f2-9e32-890eee58f9b7-config-volume" (OuterVolumeSpecName: "config-volume") pod "32692823-592d-42f2-9e32-890eee58f9b7" (UID: "32692823-592d-42f2-9e32-890eee58f9b7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.254158 4634 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32692823-592d-42f2-9e32-890eee58f9b7-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.259313 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32692823-592d-42f2-9e32-890eee58f9b7-kube-api-access-qzz9z" (OuterVolumeSpecName: "kube-api-access-qzz9z") pod "32692823-592d-42f2-9e32-890eee58f9b7" (UID: "32692823-592d-42f2-9e32-890eee58f9b7"). InnerVolumeSpecName "kube-api-access-qzz9z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.262218 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32692823-592d-42f2-9e32-890eee58f9b7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "32692823-592d-42f2-9e32-890eee58f9b7" (UID: "32692823-592d-42f2-9e32-890eee58f9b7"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.355946 4634 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32692823-592d-42f2-9e32-890eee58f9b7-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.356004 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzz9z\" (UniqueName: \"kubernetes.io/projected/32692823-592d-42f2-9e32-890eee58f9b7-kube-api-access-qzz9z\") on node \"crc\" DevicePath \"\"" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.773278 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" event={"ID":"32692823-592d-42f2-9e32-890eee58f9b7","Type":"ContainerDied","Data":"8106d109c14db31951e7b96c6949c3cdcbfd0ac7502320a1833f2e12c45cb2dd"} Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.773337 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8106d109c14db31951e7b96c6949c3cdcbfd0ac7502320a1833f2e12c45cb2dd" Sep 29 14:30:03 crc kubenswrapper[4634]: I0929 14:30:03.773357 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319270-vs2z6" Sep 29 14:30:04 crc kubenswrapper[4634]: I0929 14:30:04.220814 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd"] Sep 29 14:30:04 crc kubenswrapper[4634]: I0929 14:30:04.230434 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319225-lj7nd"] Sep 29 14:30:06 crc kubenswrapper[4634]: I0929 14:30:06.124444 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a7bd45c-8ec4-4af0-b4cb-812ab9b173da" path="/var/lib/kubelet/pods/1a7bd45c-8ec4-4af0-b4cb-812ab9b173da/volumes" Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.396296 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.396981 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.397046 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.397895 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"531b311522f5a8d0b24fa9743853f85282837cdcd078122b14beb225d3ea508a"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.397975 4634 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://531b311522f5a8d0b24fa9743853f85282837cdcd078122b14beb225d3ea508a" gracePeriod=600 Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.873965 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="531b311522f5a8d0b24fa9743853f85282837cdcd078122b14beb225d3ea508a" exitCode=0 Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.874112 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"531b311522f5a8d0b24fa9743853f85282837cdcd078122b14beb225d3ea508a"} Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.874460 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"} Sep 29 14:30:14 crc kubenswrapper[4634]: I0929 14:30:14.874497 4634 scope.go:117] "RemoveContainer" containerID="74d5db2147c3fb4ab2d0ea557c6828e34fdd7ef64b60f85eee67527098cfd423" Sep 29 14:30:53 crc kubenswrapper[4634]: I0929 14:30:53.192009 4634 scope.go:117] "RemoveContainer" containerID="35ed01940a4f8c9785aa1114b2a1d35a2615b4d58e2f51da30195ddae7f45b90" Sep 29 14:31:15 crc kubenswrapper[4634]: I0929 14:31:15.491978 4634 generic.go:334] "Generic (PLEG): container finished" podID="4dbe661a-c031-4716-9816-d5cb05957a35" containerID="fa1aaed74f30bb4af32197031b4ee1fcc4869dd0f651fe045e721fb01a653f3f" exitCode=0 Sep 29 14:31:15 crc kubenswrapper[4634]: I0929 14:31:15.492230 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" event={"ID":"4dbe661a-c031-4716-9816-d5cb05957a35","Type":"ContainerDied","Data":"fa1aaed74f30bb4af32197031b4ee1fcc4869dd0f651fe045e721fb01a653f3f"} Sep 29 14:31:16 crc kubenswrapper[4634]: I0929 14:31:16.911645 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010205 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-1\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010280 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-1\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010387 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-combined-ca-bundle\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010423 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-ssh-key\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010471 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-0\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010507 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-0\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010558 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzflc\" (UniqueName: \"kubernetes.io/projected/4dbe661a-c031-4716-9816-d5cb05957a35-kube-api-access-nzflc\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010620 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-inventory\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.010711 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/4dbe661a-c031-4716-9816-d5cb05957a35-nova-extra-config-0\") pod \"4dbe661a-c031-4716-9816-d5cb05957a35\" (UID: \"4dbe661a-c031-4716-9816-d5cb05957a35\") " Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.016684 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.028895 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dbe661a-c031-4716-9816-d5cb05957a35-kube-api-access-nzflc" (OuterVolumeSpecName: "kube-api-access-nzflc") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "kube-api-access-nzflc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.040677 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.048529 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.048960 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.051103 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.052122 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-inventory" (OuterVolumeSpecName: "inventory") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.057344 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dbe661a-c031-4716-9816-d5cb05957a35-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "nova-extra-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.064239 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "4dbe661a-c031-4716-9816-d5cb05957a35" (UID: "4dbe661a-c031-4716-9816-d5cb05957a35"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.112992 4634 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113028 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113038 4634 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113046 4634 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113056 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzflc\" (UniqueName: \"kubernetes.io/projected/4dbe661a-c031-4716-9816-d5cb05957a35-kube-api-access-nzflc\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113065 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113076 4634 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/4dbe661a-c031-4716-9816-d5cb05957a35-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113107 4634 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.113118 4634 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/4dbe661a-c031-4716-9816-d5cb05957a35-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.511321 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" event={"ID":"4dbe661a-c031-4716-9816-d5cb05957a35","Type":"ContainerDied","Data":"7b2226a642d53ff87fc06b471df82e79e199adb5fd9cf14c6f3ff8947aa19b4f"} Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.511366 4634 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="7b2226a642d53ff87fc06b471df82e79e199adb5fd9cf14c6f3ff8947aa19b4f" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.511419 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-x58xd" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.739050 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg"] Sep 29 14:31:17 crc kubenswrapper[4634]: E0929 14:31:17.739574 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32692823-592d-42f2-9e32-890eee58f9b7" containerName="collect-profiles" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.739590 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="32692823-592d-42f2-9e32-890eee58f9b7" containerName="collect-profiles" Sep 29 14:31:17 crc kubenswrapper[4634]: E0929 14:31:17.739606 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dbe661a-c031-4716-9816-d5cb05957a35" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.739612 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dbe661a-c031-4716-9816-d5cb05957a35" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.739780 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dbe661a-c031-4716-9816-d5cb05957a35" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.739799 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="32692823-592d-42f2-9e32-890eee58f9b7" containerName="collect-profiles" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.740568 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.743232 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.743603 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.743794 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.748320 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.749491 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-q2m66" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.761953 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg"] Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.827335 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.827423 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.827449 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.827534 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk6zj\" (UniqueName: \"kubernetes.io/projected/8e05d615-586f-430c-a8c9-f871a04f31d2-kube-api-access-mk6zj\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.827593 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.827615 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.827655 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.929481 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk6zj\" (UniqueName: \"kubernetes.io/projected/8e05d615-586f-430c-a8c9-f871a04f31d2-kube-api-access-mk6zj\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.929571 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.929594 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.929638 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.929684 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.929702 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: 
\"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.929724 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.934249 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.937016 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.938126 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.939667 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.939874 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.940329 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:17 crc kubenswrapper[4634]: I0929 14:31:17.950135 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk6zj\" (UniqueName: \"kubernetes.io/projected/8e05d615-586f-430c-a8c9-f871a04f31d2-kube-api-access-mk6zj\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-f78vg\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:18 crc kubenswrapper[4634]: I0929 14:31:18.064262 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" Sep 29 14:31:18 crc kubenswrapper[4634]: I0929 14:31:18.612518 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg"] Sep 29 14:31:19 crc kubenswrapper[4634]: I0929 14:31:19.542481 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" event={"ID":"8e05d615-586f-430c-a8c9-f871a04f31d2","Type":"ContainerStarted","Data":"5fe1223dcaccde34f6874a816c19c1e144606071c32464d3881e449d9ae92235"} Sep 29 14:31:19 crc kubenswrapper[4634]: I0929 14:31:19.543042 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" event={"ID":"8e05d615-586f-430c-a8c9-f871a04f31d2","Type":"ContainerStarted","Data":"b064a00deaebec836c1290530a9e3c088be003a545eeac2ea5b8c8a4429e91e9"} Sep 29 14:31:19 crc kubenswrapper[4634]: I0929 14:31:19.573832 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" podStartSLOduration=2.405431659 podStartE2EDuration="2.573809715s" podCreationTimestamp="2025-09-29 14:31:17 +0000 UTC" firstStartedPulling="2025-09-29 14:31:18.61755277 +0000 UTC m=+2809.186280529" lastFinishedPulling="2025-09-29 14:31:18.785930836 +0000 UTC m=+2809.354658585" observedRunningTime="2025-09-29 14:31:19.572016375 +0000 UTC m=+2810.140744164" watchObservedRunningTime="2025-09-29 14:31:19.573809715 +0000 UTC m=+2810.142537494" Sep 29 14:32:14 crc kubenswrapper[4634]: I0929 14:32:14.396539 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:32:14 crc kubenswrapper[4634]: I0929 14:32:14.397150 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:32:44 crc kubenswrapper[4634]: I0929 14:32:44.396869 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:32:44 crc kubenswrapper[4634]: I0929 14:32:44.398341 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.396287 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.397117 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.397202 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.398449 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.398562 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" gracePeriod=600 Sep 29 14:33:14 crc kubenswrapper[4634]: E0929 14:33:14.528316 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.846660 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" exitCode=0 Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.846725 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"} Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.846816 4634 scope.go:117] "RemoveContainer" containerID="531b311522f5a8d0b24fa9743853f85282837cdcd078122b14beb225d3ea508a" Sep 29 14:33:14 crc kubenswrapper[4634]: I0929 14:33:14.848150 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:33:14 crc kubenswrapper[4634]: E0929 14:33:14.849012 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" 
Sep 29 14:33:25 crc kubenswrapper[4634]: I0929 14:33:25.110660 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:33:25 crc kubenswrapper[4634]: E0929 14:33:25.111889 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:33:40 crc kubenswrapper[4634]: I0929 14:33:40.117658 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:33:40 crc kubenswrapper[4634]: E0929 14:33:40.118708 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:33:55 crc kubenswrapper[4634]: I0929 14:33:55.110335 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:33:55 crc kubenswrapper[4634]: E0929 14:33:55.111060 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:34:06 crc kubenswrapper[4634]: I0929 14:34:06.110485 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:34:06 crc kubenswrapper[4634]: E0929 14:34:06.111270 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:34:19 crc kubenswrapper[4634]: I0929 14:34:19.110071 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:34:19 crc kubenswrapper[4634]: E0929 14:34:19.111020 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:34:32 crc kubenswrapper[4634]: I0929 14:34:32.111110 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:34:32 crc kubenswrapper[4634]: E0929 14:34:32.111997 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:34:45 crc kubenswrapper[4634]: I0929 14:34:45.111383 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:34:45 crc kubenswrapper[4634]: E0929 14:34:45.112301 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:34:56 crc kubenswrapper[4634]: I0929 14:34:56.110338 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:34:56 crc kubenswrapper[4634]: E0929 14:34:56.111134 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:35:00 crc kubenswrapper[4634]: I0929 14:35:00.920385 4634 generic.go:334] "Generic (PLEG): container finished" podID="8e05d615-586f-430c-a8c9-f871a04f31d2" containerID="5fe1223dcaccde34f6874a816c19c1e144606071c32464d3881e449d9ae92235" exitCode=0
Sep 29 14:35:00 crc kubenswrapper[4634]: I0929 14:35:00.921270 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" event={"ID":"8e05d615-586f-430c-a8c9-f871a04f31d2","Type":"ContainerDied","Data":"5fe1223dcaccde34f6874a816c19c1e144606071c32464d3881e449d9ae92235"}
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.379257 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg"
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.407744 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-0\") pod \"8e05d615-586f-430c-a8c9-f871a04f31d2\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") "
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.407839 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-2\") pod \"8e05d615-586f-430c-a8c9-f871a04f31d2\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") "
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.407868 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-inventory\") pod \"8e05d615-586f-430c-a8c9-f871a04f31d2\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") "
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.407945 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-telemetry-combined-ca-bundle\") pod \"8e05d615-586f-430c-a8c9-f871a04f31d2\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") "
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.408060 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk6zj\" (UniqueName: \"kubernetes.io/projected/8e05d615-586f-430c-a8c9-f871a04f31d2-kube-api-access-mk6zj\") pod \"8e05d615-586f-430c-a8c9-f871a04f31d2\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") "
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.408127 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-1\") pod \"8e05d615-586f-430c-a8c9-f871a04f31d2\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") "
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.408172 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ssh-key\") pod \"8e05d615-586f-430c-a8c9-f871a04f31d2\" (UID: \"8e05d615-586f-430c-a8c9-f871a04f31d2\") "
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.439012 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "8e05d615-586f-430c-a8c9-f871a04f31d2" (UID: "8e05d615-586f-430c-a8c9-f871a04f31d2"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.441894 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e05d615-586f-430c-a8c9-f871a04f31d2-kube-api-access-mk6zj" (OuterVolumeSpecName: "kube-api-access-mk6zj") pod "8e05d615-586f-430c-a8c9-f871a04f31d2" (UID: "8e05d615-586f-430c-a8c9-f871a04f31d2"). InnerVolumeSpecName "kube-api-access-mk6zj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.446040 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-inventory" (OuterVolumeSpecName: "inventory") pod "8e05d615-586f-430c-a8c9-f871a04f31d2" (UID: "8e05d615-586f-430c-a8c9-f871a04f31d2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.447652 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "8e05d615-586f-430c-a8c9-f871a04f31d2" (UID: "8e05d615-586f-430c-a8c9-f871a04f31d2"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.460130 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8e05d615-586f-430c-a8c9-f871a04f31d2" (UID: "8e05d615-586f-430c-a8c9-f871a04f31d2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.479077 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "8e05d615-586f-430c-a8c9-f871a04f31d2" (UID: "8e05d615-586f-430c-a8c9-f871a04f31d2"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.510838 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk6zj\" (UniqueName: \"kubernetes.io/projected/8e05d615-586f-430c-a8c9-f871a04f31d2-kube-api-access-mk6zj\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.510899 4634 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.510912 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.510924 4634 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.510936 4634 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.510946 4634 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.517544 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "8e05d615-586f-430c-a8c9-f871a04f31d2" (UID: "8e05d615-586f-430c-a8c9-f871a04f31d2"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.612679 4634 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/8e05d615-586f-430c-a8c9-f871a04f31d2-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\""
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.944384 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg" event={"ID":"8e05d615-586f-430c-a8c9-f871a04f31d2","Type":"ContainerDied","Data":"b064a00deaebec836c1290530a9e3c088be003a545eeac2ea5b8c8a4429e91e9"}
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.944454 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-f78vg"
Sep 29 14:35:02 crc kubenswrapper[4634]: I0929 14:35:02.944465 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b064a00deaebec836c1290530a9e3c088be003a545eeac2ea5b8c8a4429e91e9"
Sep 29 14:35:10 crc kubenswrapper[4634]: I0929 14:35:10.116155 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:35:10 crc kubenswrapper[4634]: E0929 14:35:10.116991 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:35:23 crc kubenswrapper[4634]: I0929 14:35:23.110630 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:35:23 crc kubenswrapper[4634]: E0929 14:35:23.112762 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:35:37 crc kubenswrapper[4634]: I0929 14:35:37.112321 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:35:37 crc kubenswrapper[4634]: E0929 14:35:37.114867 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:35:49 crc kubenswrapper[4634]: I0929 14:35:49.109934 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:35:49 crc kubenswrapper[4634]: E0929 14:35:49.110637 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:35:53 crc kubenswrapper[4634]: I0929 14:35:53.414424 4634 scope.go:117] "RemoveContainer" containerID="95a53188cd1e26a98314f778b2b931c4f9e1d95ea3ffc65bfe4fcf0c66c588f2"
Sep 29 14:35:53 crc kubenswrapper[4634]: I0929 14:35:53.436306 4634 scope.go:117] "RemoveContainer" containerID="5a715e8b4d1c1077487c37b91827f454ee219ad33143a98d47db292e235f06c4"
Sep 29 14:35:53 crc kubenswrapper[4634]: I0929 14:35:53.478410 4634 scope.go:117] "RemoveContainer" containerID="3cd08677bc71cae6f48e5128da0a59f7ffc8475eb303425ae314b781b629d88d"
Sep 29 14:36:04 crc kubenswrapper[4634]: I0929 14:36:04.111851 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675"
Sep 29 14:36:04 crc kubenswrapper[4634]: E0929 14:36:04.112958 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.918614 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Sep 29 14:36:05 crc kubenswrapper[4634]: E0929 14:36:05.919391 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e05d615-586f-430c-a8c9-f871a04f31d2" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.919411 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e05d615-586f-430c-a8c9-f871a04f31d2" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.919666 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e05d615-586f-430c-a8c9-f871a04f31d2" containerName="telemetry-edpm-deployment-openstack-edpm-ipam"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.920481 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.927659 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.927761 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.927856 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.927786 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bh8f2"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.938025 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xknrd\" (UniqueName: \"kubernetes.io/projected/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-kube-api-access-xknrd\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.938359 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.938478 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.938576 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.938680 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.938881 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.939033 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-config-data\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.939213 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.939240 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Sep 29 14:36:05 crc kubenswrapper[4634]: I0929 14:36:05.939486 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041039 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xknrd\" (UniqueName: \"kubernetes.io/projected/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-kube-api-access-xknrd\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041119 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041151 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041178 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041214 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041262 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041324 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-config-data\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041349 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.041381 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.042040 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.042395 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.042505 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.042828 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.043282 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-config-data\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.049646 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.049846 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.051380 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.056808 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xknrd\" (UniqueName: \"kubernetes.io/projected/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-kube-api-access-xknrd\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.080365 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " pod="openstack/tempest-tests-tempest"
Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.300476 4634 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.747227 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 14:36:06 crc kubenswrapper[4634]: W0929 14:36:06.751774 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1753ec8d_9af3_4930_a9d7_88b1c2f440cb.slice/crio-507e18f4730c1dd478e6d7fb4cb14d218b3bc919645d4e5d98168d6d3be4f92f WatchSource:0}: Error finding container 507e18f4730c1dd478e6d7fb4cb14d218b3bc919645d4e5d98168d6d3be4f92f: Status 404 returned error can't find the container with id 507e18f4730c1dd478e6d7fb4cb14d218b3bc919645d4e5d98168d6d3be4f92f Sep 29 14:36:06 crc kubenswrapper[4634]: I0929 14:36:06.755463 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:36:07 crc kubenswrapper[4634]: I0929 14:36:07.548912 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"1753ec8d-9af3-4930-a9d7-88b1c2f440cb","Type":"ContainerStarted","Data":"507e18f4730c1dd478e6d7fb4cb14d218b3bc919645d4e5d98168d6d3be4f92f"} Sep 29 14:36:16 crc kubenswrapper[4634]: I0929 14:36:16.111890 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:36:16 crc kubenswrapper[4634]: E0929 14:36:16.112748 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:36:29 crc kubenswrapper[4634]: I0929 14:36:29.110515 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:36:29 crc kubenswrapper[4634]: E0929 14:36:29.111282 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:36:40 crc kubenswrapper[4634]: E0929 14:36:40.357979 4634 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Sep 29 14:36:40 crc kubenswrapper[4634]: E0929 14:36:40.364637 4634 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xknrd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(1753ec8d-9af3-4930-a9d7-88b1c2f440cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 14:36:40 crc kubenswrapper[4634]: E0929 14:36:40.365963 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="1753ec8d-9af3-4930-a9d7-88b1c2f440cb" Sep 29 14:36:40 crc kubenswrapper[4634]: E0929 14:36:40.888885 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="1753ec8d-9af3-4930-a9d7-88b1c2f440cb" Sep 29 14:36:45 crc kubenswrapper[4634]: I0929 14:36:45.110957 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:36:45 crc kubenswrapper[4634]: E0929 14:36:45.111827 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:36:55 crc kubenswrapper[4634]: I0929 14:36:55.612655 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 14:36:57 crc kubenswrapper[4634]: I0929 14:36:57.050557 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"1753ec8d-9af3-4930-a9d7-88b1c2f440cb","Type":"ContainerStarted","Data":"d96546c07ac1777ef3c55c2ed0b430cd98d4b0c000d53e49da0c095fcbacc66c"} Sep 29 14:36:57 crc kubenswrapper[4634]: I0929 14:36:57.074072 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.218798747 podStartE2EDuration="53.074026027s" podCreationTimestamp="2025-09-29 14:36:04 +0000 UTC" firstStartedPulling="2025-09-29 14:36:06.755236724 +0000 UTC m=+3097.323964473" lastFinishedPulling="2025-09-29 14:36:55.610464004 +0000 UTC m=+3146.179191753" observedRunningTime="2025-09-29 14:36:57.070647685 +0000 UTC m=+3147.639375434" watchObservedRunningTime="2025-09-29 14:36:57.074026027 +0000 UTC m=+3147.642753776" Sep 29 14:37:00 crc kubenswrapper[4634]: I0929 14:37:00.117115 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:37:00 crc kubenswrapper[4634]: E0929 14:37:00.117749 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:37:12 crc kubenswrapper[4634]: I0929 14:37:12.110821 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:37:12 crc kubenswrapper[4634]: E0929 14:37:12.111787 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" 
podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:37:23 crc kubenswrapper[4634]: I0929 14:37:23.109820 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:37:23 crc kubenswrapper[4634]: E0929 14:37:23.110543 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:37:37 crc kubenswrapper[4634]: I0929 14:37:37.110344 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:37:37 crc kubenswrapper[4634]: E0929 14:37:37.111160 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.399045 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2s9p8"] Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.409564 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.418308 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2s9p8"] Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.467456 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-catalog-content\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.467562 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-utilities\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.467604 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97q7v\" (UniqueName: \"kubernetes.io/projected/bd1447a5-2e86-453d-81c4-db62ae14bdfc-kube-api-access-97q7v\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.569256 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-catalog-content\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " 
pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.569363 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-utilities\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.569417 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97q7v\" (UniqueName: \"kubernetes.io/projected/bd1447a5-2e86-453d-81c4-db62ae14bdfc-kube-api-access-97q7v\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.570339 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-catalog-content\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.570606 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-utilities\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.592903 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97q7v\" (UniqueName: \"kubernetes.io/projected/bd1447a5-2e86-453d-81c4-db62ae14bdfc-kube-api-access-97q7v\") pod \"certified-operators-2s9p8\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:43 crc kubenswrapper[4634]: I0929 14:37:43.738072 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:44 crc kubenswrapper[4634]: I0929 14:37:44.903606 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2s9p8"] Sep 29 14:37:45 crc kubenswrapper[4634]: I0929 14:37:45.572365 4634 generic.go:334] "Generic (PLEG): container finished" podID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerID="1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06" exitCode=0 Sep 29 14:37:45 crc kubenswrapper[4634]: I0929 14:37:45.572803 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2s9p8" event={"ID":"bd1447a5-2e86-453d-81c4-db62ae14bdfc","Type":"ContainerDied","Data":"1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06"} Sep 29 14:37:45 crc kubenswrapper[4634]: I0929 14:37:45.572936 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2s9p8" event={"ID":"bd1447a5-2e86-453d-81c4-db62ae14bdfc","Type":"ContainerStarted","Data":"2439d580e93cfcb04c1fa7683e7219ffe129ba270d42f1e79ca4339748318599"} Sep 29 14:37:46 crc kubenswrapper[4634]: I0929 14:37:46.588372 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2s9p8" event={"ID":"bd1447a5-2e86-453d-81c4-db62ae14bdfc","Type":"ContainerStarted","Data":"955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b"} Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.597178 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-98n6p"] Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.600618 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.626232 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-98n6p"] Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.677125 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-catalog-content\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.677244 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-utilities\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.677308 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5z6p\" (UniqueName: \"kubernetes.io/projected/78de47f0-3c47-4353-be94-a7eebb381372-kube-api-access-q5z6p\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.779076 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5z6p\" (UniqueName: \"kubernetes.io/projected/78de47f0-3c47-4353-be94-a7eebb381372-kube-api-access-q5z6p\") pod 
\"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.779206 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-catalog-content\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.779292 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-utilities\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.779703 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-utilities\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.780554 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-catalog-content\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.812166 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5z6p\" (UniqueName: \"kubernetes.io/projected/78de47f0-3c47-4353-be94-a7eebb381372-kube-api-access-q5z6p\") pod \"redhat-marketplace-98n6p\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:47 crc kubenswrapper[4634]: I0929 14:37:47.925538 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.112741 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:37:48 crc kubenswrapper[4634]: E0929 14:37:48.113369 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.206862 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-59w77"] Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.209836 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.226074 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-59w77"] Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.290838 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-utilities\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.290910 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69lfr\" (UniqueName: \"kubernetes.io/projected/3215aeab-32a5-467f-a570-5d144b6c4c2d-kube-api-access-69lfr\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.291234 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-catalog-content\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.394337 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-utilities\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.395830 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69lfr\" (UniqueName: \"kubernetes.io/projected/3215aeab-32a5-467f-a570-5d144b6c4c2d-kube-api-access-69lfr\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.396048 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-catalog-content\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.395545 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-utilities\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.396588 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-catalog-content\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.427648 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-69lfr\" (UniqueName: \"kubernetes.io/projected/3215aeab-32a5-467f-a570-5d144b6c4c2d-kube-api-access-69lfr\") pod \"redhat-operators-59w77\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.569210 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.615579 4634 generic.go:334] "Generic (PLEG): container finished" podID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerID="955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b" exitCode=0 Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.615631 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2s9p8" event={"ID":"bd1447a5-2e86-453d-81c4-db62ae14bdfc","Type":"ContainerDied","Data":"955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b"} Sep 29 14:37:48 crc kubenswrapper[4634]: I0929 14:37:48.646186 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-98n6p"] Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.238536 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-59w77"] Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.626499 4634 generic.go:334] "Generic (PLEG): container finished" podID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerID="b4a708304dfc4877945575b9a835660d5a3f7b849a67f99b8d7c00c1bdb5e680" exitCode=0 Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.626606 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59w77" event={"ID":"3215aeab-32a5-467f-a570-5d144b6c4c2d","Type":"ContainerDied","Data":"b4a708304dfc4877945575b9a835660d5a3f7b849a67f99b8d7c00c1bdb5e680"} Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.626671 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59w77" event={"ID":"3215aeab-32a5-467f-a570-5d144b6c4c2d","Type":"ContainerStarted","Data":"70332b80da8425be2a10603c267f1373d31c908a68f17bab6a1b5d4f64f76bbb"} Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.632464 4634 generic.go:334] "Generic (PLEG): container finished" podID="78de47f0-3c47-4353-be94-a7eebb381372" containerID="aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b" exitCode=0 Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.632550 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98n6p" event={"ID":"78de47f0-3c47-4353-be94-a7eebb381372","Type":"ContainerDied","Data":"aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b"} Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.632590 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98n6p" event={"ID":"78de47f0-3c47-4353-be94-a7eebb381372","Type":"ContainerStarted","Data":"78e52ff29a4744fd1be78ad8621e85363f2c0578070db70611a1e5dba570daf3"} Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.652337 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2s9p8" event={"ID":"bd1447a5-2e86-453d-81c4-db62ae14bdfc","Type":"ContainerStarted","Data":"e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31"} Sep 29 14:37:49 crc kubenswrapper[4634]: I0929 14:37:49.707031 4634 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2s9p8" podStartSLOduration=3.23613097 podStartE2EDuration="6.70701011s" podCreationTimestamp="2025-09-29 14:37:43 +0000 UTC" firstStartedPulling="2025-09-29 14:37:45.577334539 +0000 UTC m=+3196.146062288" lastFinishedPulling="2025-09-29 14:37:49.048213679 +0000 UTC m=+3199.616941428" observedRunningTime="2025-09-29 14:37:49.691630661 +0000 UTC m=+3200.260358410" watchObservedRunningTime="2025-09-29 14:37:49.70701011 +0000 UTC m=+3200.275737859" Sep 29 14:37:50 crc kubenswrapper[4634]: I0929 14:37:50.663413 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59w77" event={"ID":"3215aeab-32a5-467f-a570-5d144b6c4c2d","Type":"ContainerStarted","Data":"649c65690f6559cec2d18b41c0aa3ad88a3811c92f753640b3230b1810854f6f"} Sep 29 14:37:50 crc kubenswrapper[4634]: I0929 14:37:50.667410 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98n6p" event={"ID":"78de47f0-3c47-4353-be94-a7eebb381372","Type":"ContainerStarted","Data":"05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a"} Sep 29 14:37:51 crc kubenswrapper[4634]: I0929 14:37:51.678571 4634 generic.go:334] "Generic (PLEG): container finished" podID="78de47f0-3c47-4353-be94-a7eebb381372" containerID="05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a" exitCode=0 Sep 29 14:37:51 crc kubenswrapper[4634]: I0929 14:37:51.679853 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98n6p" event={"ID":"78de47f0-3c47-4353-be94-a7eebb381372","Type":"ContainerDied","Data":"05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a"} Sep 29 14:37:53 crc kubenswrapper[4634]: I0929 14:37:53.700415 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98n6p" event={"ID":"78de47f0-3c47-4353-be94-a7eebb381372","Type":"ContainerStarted","Data":"d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7"} Sep 29 14:37:53 crc kubenswrapper[4634]: I0929 14:37:53.738403 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:53 crc kubenswrapper[4634]: I0929 14:37:53.738475 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:37:54 crc kubenswrapper[4634]: I0929 14:37:54.712345 4634 generic.go:334] "Generic (PLEG): container finished" podID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerID="649c65690f6559cec2d18b41c0aa3ad88a3811c92f753640b3230b1810854f6f" exitCode=0 Sep 29 14:37:54 crc kubenswrapper[4634]: I0929 14:37:54.712388 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59w77" event={"ID":"3215aeab-32a5-467f-a570-5d144b6c4c2d","Type":"ContainerDied","Data":"649c65690f6559cec2d18b41c0aa3ad88a3811c92f753640b3230b1810854f6f"} Sep 29 14:37:54 crc kubenswrapper[4634]: I0929 14:37:54.735854 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-98n6p" podStartSLOduration=4.667210201 podStartE2EDuration="7.73583636s" podCreationTimestamp="2025-09-29 14:37:47 +0000 UTC" firstStartedPulling="2025-09-29 14:37:49.636883777 +0000 UTC m=+3200.205611516" lastFinishedPulling="2025-09-29 14:37:52.705509926 +0000 UTC m=+3203.274237675" 
observedRunningTime="2025-09-29 14:37:53.741893107 +0000 UTC m=+3204.310620856" watchObservedRunningTime="2025-09-29 14:37:54.73583636 +0000 UTC m=+3205.304564099" Sep 29 14:37:54 crc kubenswrapper[4634]: I0929 14:37:54.805870 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-2s9p8" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="registry-server" probeResult="failure" output=< Sep 29 14:37:54 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:37:54 crc kubenswrapper[4634]: > Sep 29 14:37:55 crc kubenswrapper[4634]: I0929 14:37:55.723672 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59w77" event={"ID":"3215aeab-32a5-467f-a570-5d144b6c4c2d","Type":"ContainerStarted","Data":"3f1395d1db15def7614909af8d8ea3cd97ee77981384ba9054f98e1037cd5d18"} Sep 29 14:37:55 crc kubenswrapper[4634]: I0929 14:37:55.742191 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-59w77" podStartSLOduration=2.008840995 podStartE2EDuration="7.742169112s" podCreationTimestamp="2025-09-29 14:37:48 +0000 UTC" firstStartedPulling="2025-09-29 14:37:49.631174782 +0000 UTC m=+3200.199902531" lastFinishedPulling="2025-09-29 14:37:55.364502899 +0000 UTC m=+3205.933230648" observedRunningTime="2025-09-29 14:37:55.73991523 +0000 UTC m=+3206.308642979" watchObservedRunningTime="2025-09-29 14:37:55.742169112 +0000 UTC m=+3206.310896861" Sep 29 14:37:57 crc kubenswrapper[4634]: I0929 14:37:57.926832 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:57 crc kubenswrapper[4634]: I0929 14:37:57.927405 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:57 crc kubenswrapper[4634]: I0929 14:37:57.992321 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:58 crc kubenswrapper[4634]: I0929 14:37:58.570244 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:58 crc kubenswrapper[4634]: I0929 14:37:58.570335 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:37:58 crc kubenswrapper[4634]: I0929 14:37:58.805518 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:37:59 crc kubenswrapper[4634]: I0929 14:37:59.111300 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:37:59 crc kubenswrapper[4634]: E0929 14:37:59.111600 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:37:59 crc kubenswrapper[4634]: I0929 14:37:59.367606 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-98n6p"] Sep 29 14:37:59 crc kubenswrapper[4634]: 
I0929 14:37:59.616904 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-59w77" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="registry-server" probeResult="failure" output=< Sep 29 14:37:59 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:37:59 crc kubenswrapper[4634]: > Sep 29 14:38:00 crc kubenswrapper[4634]: I0929 14:38:00.771449 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-98n6p" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="registry-server" containerID="cri-o://d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7" gracePeriod=2 Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.452125 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.506504 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5z6p\" (UniqueName: \"kubernetes.io/projected/78de47f0-3c47-4353-be94-a7eebb381372-kube-api-access-q5z6p\") pod \"78de47f0-3c47-4353-be94-a7eebb381372\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.506599 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-utilities\") pod \"78de47f0-3c47-4353-be94-a7eebb381372\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.506686 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-catalog-content\") pod \"78de47f0-3c47-4353-be94-a7eebb381372\" (UID: \"78de47f0-3c47-4353-be94-a7eebb381372\") " Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.508436 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-utilities" (OuterVolumeSpecName: "utilities") pod "78de47f0-3c47-4353-be94-a7eebb381372" (UID: "78de47f0-3c47-4353-be94-a7eebb381372"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.519423 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78de47f0-3c47-4353-be94-a7eebb381372" (UID: "78de47f0-3c47-4353-be94-a7eebb381372"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.523840 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78de47f0-3c47-4353-be94-a7eebb381372-kube-api-access-q5z6p" (OuterVolumeSpecName: "kube-api-access-q5z6p") pod "78de47f0-3c47-4353-be94-a7eebb381372" (UID: "78de47f0-3c47-4353-be94-a7eebb381372"). InnerVolumeSpecName "kube-api-access-q5z6p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.609395 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.609439 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5z6p\" (UniqueName: \"kubernetes.io/projected/78de47f0-3c47-4353-be94-a7eebb381372-kube-api-access-q5z6p\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.609452 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78de47f0-3c47-4353-be94-a7eebb381372-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.785484 4634 generic.go:334] "Generic (PLEG): container finished" podID="78de47f0-3c47-4353-be94-a7eebb381372" containerID="d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7" exitCode=0 Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.785530 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98n6p" event={"ID":"78de47f0-3c47-4353-be94-a7eebb381372","Type":"ContainerDied","Data":"d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7"} Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.785562 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98n6p" event={"ID":"78de47f0-3c47-4353-be94-a7eebb381372","Type":"ContainerDied","Data":"78e52ff29a4744fd1be78ad8621e85363f2c0578070db70611a1e5dba570daf3"} Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.785580 4634 scope.go:117] "RemoveContainer" containerID="d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.785609 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98n6p" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.843930 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-98n6p"] Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.859242 4634 scope.go:117] "RemoveContainer" containerID="05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a" Sep 29 14:38:01 crc kubenswrapper[4634]: I0929 14:38:01.869647 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-98n6p"] Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.019179 4634 scope.go:117] "RemoveContainer" containerID="aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b" Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.092251 4634 scope.go:117] "RemoveContainer" containerID="d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7" Sep 29 14:38:02 crc kubenswrapper[4634]: E0929 14:38:02.092758 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7\": container with ID starting with d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7 not found: ID does not exist" containerID="d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7" Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.092818 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7"} err="failed to get container status \"d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7\": rpc error: code = NotFound desc = could not find container \"d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7\": container with ID starting with d095bb8e676eb1d4b87db1e167b562ab2d527ac3da38c9c662048fb520c740a7 not found: ID does not exist" Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.092902 4634 scope.go:117] "RemoveContainer" containerID="05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a" Sep 29 14:38:02 crc kubenswrapper[4634]: E0929 14:38:02.093609 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a\": container with ID starting with 05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a not found: ID does not exist" containerID="05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a" Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.093645 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a"} err="failed to get container status \"05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a\": rpc error: code = NotFound desc = could not find container \"05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a\": container with ID starting with 05b11b297f0b9453e41bab2449e6d9230a45e3db4b402ba105a2b8d53e86876a not found: ID does not exist" Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.093663 4634 scope.go:117] "RemoveContainer" containerID="aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b" Sep 29 14:38:02 crc kubenswrapper[4634]: E0929 14:38:02.094420 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b\": container with ID starting with aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b not found: ID does not exist" containerID="aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b" Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.094448 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b"} err="failed to get container status \"aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b\": rpc error: code = NotFound desc = could not find container \"aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b\": container with ID starting with aae868b0d4dd868d8347387289b43df47066a644c1c8c28f2526942eff46e88b not found: ID does not exist" Sep 29 14:38:02 crc kubenswrapper[4634]: I0929 14:38:02.122261 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78de47f0-3c47-4353-be94-a7eebb381372" path="/var/lib/kubelet/pods/78de47f0-3c47-4353-be94-a7eebb381372/volumes" Sep 29 14:38:03 crc kubenswrapper[4634]: I0929 14:38:03.802562 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:38:03 crc kubenswrapper[4634]: I0929 14:38:03.860066 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:38:05 crc kubenswrapper[4634]: I0929 14:38:05.570406 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2s9p8"] Sep 29 14:38:05 crc kubenswrapper[4634]: I0929 14:38:05.825308 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2s9p8" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="registry-server" containerID="cri-o://e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31" gracePeriod=2 Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.419536 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.533387 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-utilities\") pod \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.533503 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97q7v\" (UniqueName: \"kubernetes.io/projected/bd1447a5-2e86-453d-81c4-db62ae14bdfc-kube-api-access-97q7v\") pod \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.533571 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-catalog-content\") pod \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\" (UID: \"bd1447a5-2e86-453d-81c4-db62ae14bdfc\") " Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.534914 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-utilities" (OuterVolumeSpecName: "utilities") pod "bd1447a5-2e86-453d-81c4-db62ae14bdfc" (UID: "bd1447a5-2e86-453d-81c4-db62ae14bdfc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.541609 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd1447a5-2e86-453d-81c4-db62ae14bdfc-kube-api-access-97q7v" (OuterVolumeSpecName: "kube-api-access-97q7v") pod "bd1447a5-2e86-453d-81c4-db62ae14bdfc" (UID: "bd1447a5-2e86-453d-81c4-db62ae14bdfc"). InnerVolumeSpecName "kube-api-access-97q7v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.603937 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd1447a5-2e86-453d-81c4-db62ae14bdfc" (UID: "bd1447a5-2e86-453d-81c4-db62ae14bdfc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.635734 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.635767 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97q7v\" (UniqueName: \"kubernetes.io/projected/bd1447a5-2e86-453d-81c4-db62ae14bdfc-kube-api-access-97q7v\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.635777 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd1447a5-2e86-453d-81c4-db62ae14bdfc-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.839071 4634 generic.go:334] "Generic (PLEG): container finished" podID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerID="e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31" exitCode=0 Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.839138 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2s9p8" event={"ID":"bd1447a5-2e86-453d-81c4-db62ae14bdfc","Type":"ContainerDied","Data":"e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31"} Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.839169 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2s9p8" event={"ID":"bd1447a5-2e86-453d-81c4-db62ae14bdfc","Type":"ContainerDied","Data":"2439d580e93cfcb04c1fa7683e7219ffe129ba270d42f1e79ca4339748318599"} Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.839190 4634 scope.go:117] "RemoveContainer" containerID="e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.839364 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2s9p8" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.883820 4634 scope.go:117] "RemoveContainer" containerID="955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.893032 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2s9p8"] Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.904479 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2s9p8"] Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.912740 4634 scope.go:117] "RemoveContainer" containerID="1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.972218 4634 scope.go:117] "RemoveContainer" containerID="e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31" Sep 29 14:38:06 crc kubenswrapper[4634]: E0929 14:38:06.972758 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31\": container with ID starting with e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31 not found: ID does not exist" containerID="e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.972831 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31"} err="failed to get container status \"e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31\": rpc error: code = NotFound desc = could not find container \"e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31\": container with ID starting with e6c8a154308209ced950f6fc4b1f266c8e3a10f48c489e471c5009a4cf90da31 not found: ID does not exist" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.972886 4634 scope.go:117] "RemoveContainer" containerID="955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b" Sep 29 14:38:06 crc kubenswrapper[4634]: E0929 14:38:06.973431 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b\": container with ID starting with 955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b not found: ID does not exist" containerID="955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.973484 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b"} err="failed to get container status \"955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b\": rpc error: code = NotFound desc = could not find container \"955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b\": container with ID starting with 955466e186c5de529fe28a9cd65fcb083a3b4361344b1f06e544c432654e1c2b not found: ID does not exist" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.973512 4634 scope.go:117] "RemoveContainer" containerID="1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06" Sep 29 14:38:06 crc kubenswrapper[4634]: E0929 14:38:06.973746 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06\": container with ID starting with 1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06 not found: ID does not exist" containerID="1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06" Sep 29 14:38:06 crc kubenswrapper[4634]: I0929 14:38:06.973772 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06"} err="failed to get container status \"1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06\": rpc error: code = NotFound desc = could not find container \"1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06\": container with ID starting with 1257af6335d1f20ab3a6fc1fa85e5000aedd7c982b42f77f4f1ffa70712a6d06 not found: ID does not exist" Sep 29 14:38:08 crc kubenswrapper[4634]: I0929 14:38:08.129287 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" path="/var/lib/kubelet/pods/bd1447a5-2e86-453d-81c4-db62ae14bdfc/volumes" Sep 29 14:38:08 crc kubenswrapper[4634]: I0929 14:38:08.633073 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:38:08 crc kubenswrapper[4634]: I0929 14:38:08.688987 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:38:10 crc kubenswrapper[4634]: I0929 14:38:10.375882 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-59w77"] Sep 29 14:38:10 crc kubenswrapper[4634]: I0929 14:38:10.376254 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-59w77" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="registry-server" containerID="cri-o://3f1395d1db15def7614909af8d8ea3cd97ee77981384ba9054f98e1037cd5d18" gracePeriod=2 Sep 29 14:38:10 crc kubenswrapper[4634]: I0929 14:38:10.885400 4634 generic.go:334] "Generic (PLEG): container finished" podID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerID="3f1395d1db15def7614909af8d8ea3cd97ee77981384ba9054f98e1037cd5d18" exitCode=0 Sep 29 14:38:10 crc kubenswrapper[4634]: I0929 14:38:10.885494 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59w77" event={"ID":"3215aeab-32a5-467f-a570-5d144b6c4c2d","Type":"ContainerDied","Data":"3f1395d1db15def7614909af8d8ea3cd97ee77981384ba9054f98e1037cd5d18"} Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.017647 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.037536 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-catalog-content\") pod \"3215aeab-32a5-467f-a570-5d144b6c4c2d\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.037755 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-utilities\") pod \"3215aeab-32a5-467f-a570-5d144b6c4c2d\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.038077 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69lfr\" (UniqueName: \"kubernetes.io/projected/3215aeab-32a5-467f-a570-5d144b6c4c2d-kube-api-access-69lfr\") pod \"3215aeab-32a5-467f-a570-5d144b6c4c2d\" (UID: \"3215aeab-32a5-467f-a570-5d144b6c4c2d\") " Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.038336 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-utilities" (OuterVolumeSpecName: "utilities") pod "3215aeab-32a5-467f-a570-5d144b6c4c2d" (UID: "3215aeab-32a5-467f-a570-5d144b6c4c2d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.039348 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.046055 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3215aeab-32a5-467f-a570-5d144b6c4c2d-kube-api-access-69lfr" (OuterVolumeSpecName: "kube-api-access-69lfr") pod "3215aeab-32a5-467f-a570-5d144b6c4c2d" (UID: "3215aeab-32a5-467f-a570-5d144b6c4c2d"). InnerVolumeSpecName "kube-api-access-69lfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.140741 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69lfr\" (UniqueName: \"kubernetes.io/projected/3215aeab-32a5-467f-a570-5d144b6c4c2d-kube-api-access-69lfr\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.159021 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3215aeab-32a5-467f-a570-5d144b6c4c2d" (UID: "3215aeab-32a5-467f-a570-5d144b6c4c2d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.242493 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3215aeab-32a5-467f-a570-5d144b6c4c2d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.904828 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59w77" event={"ID":"3215aeab-32a5-467f-a570-5d144b6c4c2d","Type":"ContainerDied","Data":"70332b80da8425be2a10603c267f1373d31c908a68f17bab6a1b5d4f64f76bbb"} Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.904914 4634 scope.go:117] "RemoveContainer" containerID="3f1395d1db15def7614909af8d8ea3cd97ee77981384ba9054f98e1037cd5d18" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.904952 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-59w77" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.946454 4634 scope.go:117] "RemoveContainer" containerID="649c65690f6559cec2d18b41c0aa3ad88a3811c92f753640b3230b1810854f6f" Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.951731 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-59w77"] Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.969461 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-59w77"] Sep 29 14:38:11 crc kubenswrapper[4634]: I0929 14:38:11.980486 4634 scope.go:117] "RemoveContainer" containerID="b4a708304dfc4877945575b9a835660d5a3f7b849a67f99b8d7c00c1bdb5e680" Sep 29 14:38:12 crc kubenswrapper[4634]: I0929 14:38:12.126924 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" path="/var/lib/kubelet/pods/3215aeab-32a5-467f-a570-5d144b6c4c2d/volumes" Sep 29 14:38:14 crc kubenswrapper[4634]: I0929 14:38:14.110942 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:38:14 crc kubenswrapper[4634]: E0929 14:38:14.111722 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:38:25 crc kubenswrapper[4634]: I0929 14:38:25.111152 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:38:26 crc kubenswrapper[4634]: I0929 14:38:26.058382 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"891ef3c942be7a286ef00514772523e961e9b293c3d77750845264de229b3602"} Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.037307 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qzmcz"] Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038615 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="extract-content" Sep 29 14:39:39 crc 
kubenswrapper[4634]: I0929 14:39:39.038628 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="extract-content" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038651 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="extract-content" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038657 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="extract-content" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038670 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038676 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038686 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="extract-utilities" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038692 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="extract-utilities" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038702 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="extract-utilities" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038708 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="extract-utilities" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038720 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038725 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038737 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="extract-utilities" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038743 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" containerName="extract-utilities" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038765 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="extract-content" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038771 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="extract-content" Sep 29 14:39:39 crc kubenswrapper[4634]: E0929 14:39:39.038779 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038784 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038959 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="3215aeab-32a5-467f-a570-5d144b6c4c2d" 
containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038977 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd1447a5-2e86-453d-81c4-db62ae14bdfc" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.038990 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="78de47f0-3c47-4353-be94-a7eebb381372" containerName="registry-server" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.040337 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.058975 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qzmcz"] Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.134041 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-utilities\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.134488 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2qsv\" (UniqueName: \"kubernetes.io/projected/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-kube-api-access-b2qsv\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.134675 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-catalog-content\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.237435 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-catalog-content\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.238036 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-utilities\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.238103 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2qsv\" (UniqueName: \"kubernetes.io/projected/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-kube-api-access-b2qsv\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.239891 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-catalog-content\") pod \"community-operators-qzmcz\" (UID: 
\"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.240263 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-utilities\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.261871 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2qsv\" (UniqueName: \"kubernetes.io/projected/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-kube-api-access-b2qsv\") pod \"community-operators-qzmcz\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:39 crc kubenswrapper[4634]: I0929 14:39:39.364886 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:40 crc kubenswrapper[4634]: I0929 14:39:40.021703 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qzmcz"] Sep 29 14:39:40 crc kubenswrapper[4634]: I0929 14:39:40.847477 4634 generic.go:334] "Generic (PLEG): container finished" podID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerID="e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20" exitCode=0 Sep 29 14:39:40 crc kubenswrapper[4634]: I0929 14:39:40.847566 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzmcz" event={"ID":"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6","Type":"ContainerDied","Data":"e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20"} Sep 29 14:39:40 crc kubenswrapper[4634]: I0929 14:39:40.847781 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzmcz" event={"ID":"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6","Type":"ContainerStarted","Data":"4fd259704bb46abe66e4dc8c829c663e3333a72d3557655fb8d64825847d9584"} Sep 29 14:39:41 crc kubenswrapper[4634]: I0929 14:39:41.901190 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzmcz" event={"ID":"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6","Type":"ContainerStarted","Data":"97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4"} Sep 29 14:39:43 crc kubenswrapper[4634]: I0929 14:39:43.925367 4634 generic.go:334] "Generic (PLEG): container finished" podID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerID="97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4" exitCode=0 Sep 29 14:39:43 crc kubenswrapper[4634]: I0929 14:39:43.925442 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzmcz" event={"ID":"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6","Type":"ContainerDied","Data":"97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4"} Sep 29 14:39:44 crc kubenswrapper[4634]: I0929 14:39:44.945569 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzmcz" event={"ID":"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6","Type":"ContainerStarted","Data":"9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70"} Sep 29 14:39:44 crc kubenswrapper[4634]: I0929 14:39:44.975863 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-qzmcz" podStartSLOduration=2.415277926 podStartE2EDuration="5.975837603s" podCreationTimestamp="2025-09-29 14:39:39 +0000 UTC" firstStartedPulling="2025-09-29 14:39:40.850940252 +0000 UTC m=+3311.419668031" lastFinishedPulling="2025-09-29 14:39:44.411499959 +0000 UTC m=+3314.980227708" observedRunningTime="2025-09-29 14:39:44.968915894 +0000 UTC m=+3315.537643643" watchObservedRunningTime="2025-09-29 14:39:44.975837603 +0000 UTC m=+3315.544565352" Sep 29 14:39:49 crc kubenswrapper[4634]: I0929 14:39:49.365210 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:49 crc kubenswrapper[4634]: I0929 14:39:49.366842 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:49 crc kubenswrapper[4634]: I0929 14:39:49.430812 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:50 crc kubenswrapper[4634]: I0929 14:39:50.067712 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:50 crc kubenswrapper[4634]: I0929 14:39:50.808990 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qzmcz"] Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.010318 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qzmcz" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="registry-server" containerID="cri-o://9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70" gracePeriod=2 Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.667975 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.828126 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2qsv\" (UniqueName: \"kubernetes.io/projected/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-kube-api-access-b2qsv\") pod \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.828611 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-catalog-content\") pod \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.828855 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-utilities\") pod \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\" (UID: \"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6\") " Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.830523 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-utilities" (OuterVolumeSpecName: "utilities") pod "61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" (UID: "61b1ee69-3e86-4c9f-8f8f-fc7f795612f6"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.847420 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-kube-api-access-b2qsv" (OuterVolumeSpecName: "kube-api-access-b2qsv") pod "61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" (UID: "61b1ee69-3e86-4c9f-8f8f-fc7f795612f6"). InnerVolumeSpecName "kube-api-access-b2qsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.884332 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" (UID: "61b1ee69-3e86-4c9f-8f8f-fc7f795612f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.931562 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.931727 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2qsv\" (UniqueName: \"kubernetes.io/projected/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-kube-api-access-b2qsv\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:52 crc kubenswrapper[4634]: I0929 14:39:52.931817 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.021993 4634 generic.go:334] "Generic (PLEG): container finished" podID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerID="9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70" exitCode=0 Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.022047 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzmcz" event={"ID":"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6","Type":"ContainerDied","Data":"9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70"} Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.022096 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qzmcz" event={"ID":"61b1ee69-3e86-4c9f-8f8f-fc7f795612f6","Type":"ContainerDied","Data":"4fd259704bb46abe66e4dc8c829c663e3333a72d3557655fb8d64825847d9584"} Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.022120 4634 scope.go:117] "RemoveContainer" containerID="9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.022203 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qzmcz" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.049347 4634 scope.go:117] "RemoveContainer" containerID="97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.067268 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qzmcz"] Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.081365 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qzmcz"] Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.091501 4634 scope.go:117] "RemoveContainer" containerID="e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.121445 4634 scope.go:117] "RemoveContainer" containerID="9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70" Sep 29 14:39:53 crc kubenswrapper[4634]: E0929 14:39:53.124134 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70\": container with ID starting with 9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70 not found: ID does not exist" containerID="9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.124172 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70"} err="failed to get container status \"9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70\": rpc error: code = NotFound desc = could not find container \"9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70\": container with ID starting with 9d8445a0dbaa676e8838f5608ce70e22ff3a2f227281c54273171ee6242f3e70 not found: ID does not exist" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.124197 4634 scope.go:117] "RemoveContainer" containerID="97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4" Sep 29 14:39:53 crc kubenswrapper[4634]: E0929 14:39:53.124769 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4\": container with ID starting with 97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4 not found: ID does not exist" containerID="97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.124789 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4"} err="failed to get container status \"97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4\": rpc error: code = NotFound desc = could not find container \"97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4\": container with ID starting with 97359306142c9dd88fe82c7f01cc05149d0b7972cc45af3dd07852f488ff64b4 not found: ID does not exist" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.124802 4634 scope.go:117] "RemoveContainer" containerID="e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20" Sep 29 14:39:53 crc kubenswrapper[4634]: E0929 14:39:53.124976 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20\": container with ID starting with e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20 not found: ID does not exist" containerID="e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20" Sep 29 14:39:53 crc kubenswrapper[4634]: I0929 14:39:53.124993 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20"} err="failed to get container status \"e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20\": rpc error: code = NotFound desc = could not find container \"e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20\": container with ID starting with e256004d31db66a2a8b4c5716885da926bc81829ffbaf7f30cbdbc6bfd7c7b20 not found: ID does not exist" Sep 29 14:39:54 crc kubenswrapper[4634]: I0929 14:39:54.121328 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" path="/var/lib/kubelet/pods/61b1ee69-3e86-4c9f-8f8f-fc7f795612f6/volumes" Sep 29 14:40:44 crc kubenswrapper[4634]: I0929 14:40:44.395666 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:40:44 crc kubenswrapper[4634]: I0929 14:40:44.396716 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:41:14 crc kubenswrapper[4634]: I0929 14:41:14.395825 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:41:14 crc kubenswrapper[4634]: I0929 14:41:14.396698 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:41:44 crc kubenswrapper[4634]: I0929 14:41:44.395786 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:41:44 crc kubenswrapper[4634]: I0929 14:41:44.396693 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:41:44 crc kubenswrapper[4634]: I0929 14:41:44.396765 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:41:44 crc kubenswrapper[4634]: I0929 14:41:44.397974 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"891ef3c942be7a286ef00514772523e961e9b293c3d77750845264de229b3602"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:41:44 crc kubenswrapper[4634]: I0929 14:41:44.398248 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://891ef3c942be7a286ef00514772523e961e9b293c3d77750845264de229b3602" gracePeriod=600 Sep 29 14:41:45 crc kubenswrapper[4634]: I0929 14:41:45.269070 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="891ef3c942be7a286ef00514772523e961e9b293c3d77750845264de229b3602" exitCode=0 Sep 29 14:41:45 crc kubenswrapper[4634]: I0929 14:41:45.269144 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"891ef3c942be7a286ef00514772523e961e9b293c3d77750845264de229b3602"} Sep 29 14:41:45 crc kubenswrapper[4634]: I0929 14:41:45.269529 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf"} Sep 29 14:41:45 crc kubenswrapper[4634]: I0929 14:41:45.269560 4634 scope.go:117] "RemoveContainer" containerID="27786af1e50f620000ffc28578b9ebbe0e9945cab99506ef7d04f3fbb0810675" Sep 29 14:43:44 crc kubenswrapper[4634]: I0929 14:43:44.395714 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:43:44 crc kubenswrapper[4634]: I0929 14:43:44.396450 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:44:14 crc kubenswrapper[4634]: I0929 14:44:14.396161 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:44:14 crc kubenswrapper[4634]: I0929 14:44:14.396874 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:44:44 crc 
kubenswrapper[4634]: I0929 14:44:44.395605 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:44:44 crc kubenswrapper[4634]: I0929 14:44:44.396380 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:44:44 crc kubenswrapper[4634]: I0929 14:44:44.396428 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:44:44 crc kubenswrapper[4634]: I0929 14:44:44.397328 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:44:44 crc kubenswrapper[4634]: I0929 14:44:44.397387 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" gracePeriod=600 Sep 29 14:44:44 crc kubenswrapper[4634]: E0929 14:44:44.571420 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:44:45 crc kubenswrapper[4634]: I0929 14:44:45.302916 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" exitCode=0 Sep 29 14:44:45 crc kubenswrapper[4634]: I0929 14:44:45.302983 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf"} Sep 29 14:44:45 crc kubenswrapper[4634]: I0929 14:44:45.303036 4634 scope.go:117] "RemoveContainer" containerID="891ef3c942be7a286ef00514772523e961e9b293c3d77750845264de229b3602" Sep 29 14:44:45 crc kubenswrapper[4634]: I0929 14:44:45.304122 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:44:45 crc kubenswrapper[4634]: E0929 14:44:45.304418 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:44:57 crc kubenswrapper[4634]: I0929 14:44:57.110527 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:44:57 crc kubenswrapper[4634]: E0929 14:44:57.111552 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.193360 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs"] Sep 29 14:45:00 crc kubenswrapper[4634]: E0929 14:45:00.195928 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="extract-content" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.195960 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="extract-content" Sep 29 14:45:00 crc kubenswrapper[4634]: E0929 14:45:00.195988 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="extract-utilities" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.195995 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="extract-utilities" Sep 29 14:45:00 crc kubenswrapper[4634]: E0929 14:45:00.196006 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="registry-server" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.196012 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="registry-server" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.196234 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="61b1ee69-3e86-4c9f-8f8f-fc7f795612f6" containerName="registry-server" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.196947 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.201720 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.204931 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.211394 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs"] Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.234355 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/227599ef-1f53-41b3-8061-0f2a5fe53385-config-volume\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.234462 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/227599ef-1f53-41b3-8061-0f2a5fe53385-secret-volume\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.234578 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69qc7\" (UniqueName: \"kubernetes.io/projected/227599ef-1f53-41b3-8061-0f2a5fe53385-kube-api-access-69qc7\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.336727 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69qc7\" (UniqueName: \"kubernetes.io/projected/227599ef-1f53-41b3-8061-0f2a5fe53385-kube-api-access-69qc7\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.336822 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/227599ef-1f53-41b3-8061-0f2a5fe53385-config-volume\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.336862 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/227599ef-1f53-41b3-8061-0f2a5fe53385-secret-volume\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.338020 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/227599ef-1f53-41b3-8061-0f2a5fe53385-config-volume\") pod 
\"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.351047 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/227599ef-1f53-41b3-8061-0f2a5fe53385-secret-volume\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.360316 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69qc7\" (UniqueName: \"kubernetes.io/projected/227599ef-1f53-41b3-8061-0f2a5fe53385-kube-api-access-69qc7\") pod \"collect-profiles-29319285-7rqgs\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:00 crc kubenswrapper[4634]: I0929 14:45:00.532178 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:01 crc kubenswrapper[4634]: I0929 14:45:01.053011 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs"] Sep 29 14:45:01 crc kubenswrapper[4634]: I0929 14:45:01.461419 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" event={"ID":"227599ef-1f53-41b3-8061-0f2a5fe53385","Type":"ContainerStarted","Data":"8a80e55d3f71ed53777b7d98eaa0415f90d11d63e830476638171dc2112ee381"} Sep 29 14:45:01 crc kubenswrapper[4634]: I0929 14:45:01.461485 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" event={"ID":"227599ef-1f53-41b3-8061-0f2a5fe53385","Type":"ContainerStarted","Data":"10c8f7550196fff9645fc4a14df108e79a9409ca15ba72fac44d7a1de74576ca"} Sep 29 14:45:01 crc kubenswrapper[4634]: I0929 14:45:01.482750 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" podStartSLOduration=1.482728652 podStartE2EDuration="1.482728652s" podCreationTimestamp="2025-09-29 14:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:45:01.47714932 +0000 UTC m=+3632.045877069" watchObservedRunningTime="2025-09-29 14:45:01.482728652 +0000 UTC m=+3632.051456401" Sep 29 14:45:02 crc kubenswrapper[4634]: I0929 14:45:02.476059 4634 generic.go:334] "Generic (PLEG): container finished" podID="227599ef-1f53-41b3-8061-0f2a5fe53385" containerID="8a80e55d3f71ed53777b7d98eaa0415f90d11d63e830476638171dc2112ee381" exitCode=0 Sep 29 14:45:02 crc kubenswrapper[4634]: I0929 14:45:02.476397 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" event={"ID":"227599ef-1f53-41b3-8061-0f2a5fe53385","Type":"ContainerDied","Data":"8a80e55d3f71ed53777b7d98eaa0415f90d11d63e830476638171dc2112ee381"} Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.172581 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.336739 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/227599ef-1f53-41b3-8061-0f2a5fe53385-config-volume\") pod \"227599ef-1f53-41b3-8061-0f2a5fe53385\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.337003 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69qc7\" (UniqueName: \"kubernetes.io/projected/227599ef-1f53-41b3-8061-0f2a5fe53385-kube-api-access-69qc7\") pod \"227599ef-1f53-41b3-8061-0f2a5fe53385\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.337107 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/227599ef-1f53-41b3-8061-0f2a5fe53385-secret-volume\") pod \"227599ef-1f53-41b3-8061-0f2a5fe53385\" (UID: \"227599ef-1f53-41b3-8061-0f2a5fe53385\") " Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.338106 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/227599ef-1f53-41b3-8061-0f2a5fe53385-config-volume" (OuterVolumeSpecName: "config-volume") pod "227599ef-1f53-41b3-8061-0f2a5fe53385" (UID: "227599ef-1f53-41b3-8061-0f2a5fe53385"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.344696 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/227599ef-1f53-41b3-8061-0f2a5fe53385-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "227599ef-1f53-41b3-8061-0f2a5fe53385" (UID: "227599ef-1f53-41b3-8061-0f2a5fe53385"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.362580 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227599ef-1f53-41b3-8061-0f2a5fe53385-kube-api-access-69qc7" (OuterVolumeSpecName: "kube-api-access-69qc7") pod "227599ef-1f53-41b3-8061-0f2a5fe53385" (UID: "227599ef-1f53-41b3-8061-0f2a5fe53385"). InnerVolumeSpecName "kube-api-access-69qc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.439587 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69qc7\" (UniqueName: \"kubernetes.io/projected/227599ef-1f53-41b3-8061-0f2a5fe53385-kube-api-access-69qc7\") on node \"crc\" DevicePath \"\"" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.439634 4634 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/227599ef-1f53-41b3-8061-0f2a5fe53385-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.439644 4634 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/227599ef-1f53-41b3-8061-0f2a5fe53385-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.496425 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" event={"ID":"227599ef-1f53-41b3-8061-0f2a5fe53385","Type":"ContainerDied","Data":"10c8f7550196fff9645fc4a14df108e79a9409ca15ba72fac44d7a1de74576ca"} Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.496474 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10c8f7550196fff9645fc4a14df108e79a9409ca15ba72fac44d7a1de74576ca" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.496511 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319285-7rqgs" Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.581172 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q"] Sep 29 14:45:04 crc kubenswrapper[4634]: I0929 14:45:04.589014 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319240-4wf6q"] Sep 29 14:45:06 crc kubenswrapper[4634]: I0929 14:45:06.123375 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24d68595-200b-4809-9287-0d4c1c332bdd" path="/var/lib/kubelet/pods/24d68595-200b-4809-9287-0d4c1c332bdd/volumes" Sep 29 14:45:08 crc kubenswrapper[4634]: I0929 14:45:08.111130 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:45:08 crc kubenswrapper[4634]: E0929 14:45:08.111993 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:45:19 crc kubenswrapper[4634]: I0929 14:45:19.110805 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:45:19 crc kubenswrapper[4634]: E0929 14:45:19.113704 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:45:32 crc kubenswrapper[4634]: I0929 14:45:32.110745 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:45:32 crc kubenswrapper[4634]: E0929 14:45:32.112904 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:45:46 crc kubenswrapper[4634]: I0929 14:45:46.110719 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:45:46 crc kubenswrapper[4634]: E0929 14:45:46.111825 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:45:53 crc kubenswrapper[4634]: I0929 14:45:53.890290 4634 scope.go:117] "RemoveContainer" containerID="9ffc0a9dfcc18bb73ed605b715e1bd06066dae1076dabccddff127b43ef5ecaf" Sep 29 14:45:57 crc kubenswrapper[4634]: I0929 14:45:57.110750 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:45:57 crc kubenswrapper[4634]: E0929 14:45:57.111995 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:46:11 crc kubenswrapper[4634]: I0929 14:46:11.111864 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:46:11 crc kubenswrapper[4634]: E0929 14:46:11.113479 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:46:24 crc kubenswrapper[4634]: I0929 14:46:24.110680 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:46:24 crc kubenswrapper[4634]: E0929 14:46:24.111946 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:46:39 crc kubenswrapper[4634]: I0929 14:46:39.111517 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:46:39 crc kubenswrapper[4634]: E0929 14:46:39.113014 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:46:52 crc kubenswrapper[4634]: I0929 14:46:52.121873 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:46:52 crc kubenswrapper[4634]: E0929 14:46:52.125534 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:47:04 crc kubenswrapper[4634]: I0929 14:47:04.111246 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:47:04 crc kubenswrapper[4634]: E0929 14:47:04.112380 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:47:18 crc kubenswrapper[4634]: I0929 14:47:18.111372 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:47:18 crc kubenswrapper[4634]: E0929 14:47:18.114899 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:47:31 crc kubenswrapper[4634]: I0929 14:47:31.111386 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:47:31 crc kubenswrapper[4634]: E0929 14:47:31.112663 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:47:42 crc kubenswrapper[4634]: I0929 14:47:42.110754 4634 
scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:47:42 crc kubenswrapper[4634]: E0929 14:47:42.111555 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.132694 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:47:56 crc kubenswrapper[4634]: E0929 14:47:56.134880 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.845815 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m2b9w"] Sep 29 14:47:56 crc kubenswrapper[4634]: E0929 14:47:56.846536 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227599ef-1f53-41b3-8061-0f2a5fe53385" containerName="collect-profiles" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.846563 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="227599ef-1f53-41b3-8061-0f2a5fe53385" containerName="collect-profiles" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.846839 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="227599ef-1f53-41b3-8061-0f2a5fe53385" containerName="collect-profiles" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.848558 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.874165 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2b9w"] Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.900739 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwlbt\" (UniqueName: \"kubernetes.io/projected/6f350a81-9915-416a-9b62-fcd5b0961d27-kube-api-access-rwlbt\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.901173 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-catalog-content\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:56 crc kubenswrapper[4634]: I0929 14:47:56.901343 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-utilities\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.003337 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-catalog-content\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.003431 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-utilities\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.003553 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwlbt\" (UniqueName: \"kubernetes.io/projected/6f350a81-9915-416a-9b62-fcd5b0961d27-kube-api-access-rwlbt\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.012236 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-utilities\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.012545 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-catalog-content\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.027708 4634 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rwlbt\" (UniqueName: \"kubernetes.io/projected/6f350a81-9915-416a-9b62-fcd5b0961d27-kube-api-access-rwlbt\") pod \"redhat-marketplace-m2b9w\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.189637 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:47:57 crc kubenswrapper[4634]: I0929 14:47:57.765524 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2b9w"] Sep 29 14:47:58 crc kubenswrapper[4634]: I0929 14:47:58.471318 4634 generic.go:334] "Generic (PLEG): container finished" podID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerID="8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0" exitCode=0 Sep 29 14:47:58 crc kubenswrapper[4634]: I0929 14:47:58.473175 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2b9w" event={"ID":"6f350a81-9915-416a-9b62-fcd5b0961d27","Type":"ContainerDied","Data":"8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0"} Sep 29 14:47:58 crc kubenswrapper[4634]: I0929 14:47:58.473355 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2b9w" event={"ID":"6f350a81-9915-416a-9b62-fcd5b0961d27","Type":"ContainerStarted","Data":"5c2b1468048dd6566a52bf0a8884d3d35ffcd6bc6e7580879bc5e017b7d1ee91"} Sep 29 14:47:58 crc kubenswrapper[4634]: I0929 14:47:58.477187 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 14:48:00 crc kubenswrapper[4634]: I0929 14:48:00.521651 4634 generic.go:334] "Generic (PLEG): container finished" podID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerID="f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a" exitCode=0 Sep 29 14:48:00 crc kubenswrapper[4634]: I0929 14:48:00.522274 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2b9w" event={"ID":"6f350a81-9915-416a-9b62-fcd5b0961d27","Type":"ContainerDied","Data":"f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a"} Sep 29 14:48:01 crc kubenswrapper[4634]: I0929 14:48:01.535206 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2b9w" event={"ID":"6f350a81-9915-416a-9b62-fcd5b0961d27","Type":"ContainerStarted","Data":"48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63"} Sep 29 14:48:01 crc kubenswrapper[4634]: I0929 14:48:01.557364 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m2b9w" podStartSLOduration=3.058917702 podStartE2EDuration="5.557343222s" podCreationTimestamp="2025-09-29 14:47:56 +0000 UTC" firstStartedPulling="2025-09-29 14:47:58.475972468 +0000 UTC m=+3809.044700217" lastFinishedPulling="2025-09-29 14:48:00.974397988 +0000 UTC m=+3811.543125737" observedRunningTime="2025-09-29 14:48:01.55396178 +0000 UTC m=+3812.122689529" watchObservedRunningTime="2025-09-29 14:48:01.557343222 +0000 UTC m=+3812.126070971" Sep 29 14:48:07 crc kubenswrapper[4634]: I0929 14:48:07.190127 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:48:07 crc kubenswrapper[4634]: I0929 14:48:07.191049 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:48:07 crc kubenswrapper[4634]: I0929 14:48:07.249344 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:48:07 crc kubenswrapper[4634]: I0929 14:48:07.653075 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:48:07 crc kubenswrapper[4634]: I0929 14:48:07.707369 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2b9w"] Sep 29 14:48:09 crc kubenswrapper[4634]: I0929 14:48:09.616439 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m2b9w" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerName="registry-server" containerID="cri-o://48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63" gracePeriod=2 Sep 29 14:48:09 crc kubenswrapper[4634]: I0929 14:48:09.918332 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-w4xqx"] Sep 29 14:48:09 crc kubenswrapper[4634]: I0929 14:48:09.921961 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:09 crc kubenswrapper[4634]: I0929 14:48:09.930616 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w4xqx"] Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.055192 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h74pq\" (UniqueName: \"kubernetes.io/projected/0d29b02a-75cf-4064-b063-4705c99544e7-kube-api-access-h74pq\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.058673 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d29b02a-75cf-4064-b063-4705c99544e7-catalog-content\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.059039 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d29b02a-75cf-4064-b063-4705c99544e7-utilities\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.119670 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:48:10 crc kubenswrapper[4634]: E0929 14:48:10.123107 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.170874 4634 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-h74pq\" (UniqueName: \"kubernetes.io/projected/0d29b02a-75cf-4064-b063-4705c99544e7-kube-api-access-h74pq\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.170970 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d29b02a-75cf-4064-b063-4705c99544e7-catalog-content\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.171039 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d29b02a-75cf-4064-b063-4705c99544e7-utilities\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.171715 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d29b02a-75cf-4064-b063-4705c99544e7-utilities\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.172340 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d29b02a-75cf-4064-b063-4705c99544e7-catalog-content\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.205245 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h74pq\" (UniqueName: \"kubernetes.io/projected/0d29b02a-75cf-4064-b063-4705c99544e7-kube-api-access-h74pq\") pod \"certified-operators-w4xqx\" (UID: \"0d29b02a-75cf-4064-b063-4705c99544e7\") " pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:10 crc kubenswrapper[4634]: I0929 14:48:10.251804 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.444818 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.583274 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-utilities\") pod \"6f350a81-9915-416a-9b62-fcd5b0961d27\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.584075 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-catalog-content\") pod \"6f350a81-9915-416a-9b62-fcd5b0961d27\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.584204 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwlbt\" (UniqueName: \"kubernetes.io/projected/6f350a81-9915-416a-9b62-fcd5b0961d27-kube-api-access-rwlbt\") pod \"6f350a81-9915-416a-9b62-fcd5b0961d27\" (UID: \"6f350a81-9915-416a-9b62-fcd5b0961d27\") " Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.584548 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-utilities" (OuterVolumeSpecName: "utilities") pod "6f350a81-9915-416a-9b62-fcd5b0961d27" (UID: "6f350a81-9915-416a-9b62-fcd5b0961d27"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.585017 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.590159 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f350a81-9915-416a-9b62-fcd5b0961d27-kube-api-access-rwlbt" (OuterVolumeSpecName: "kube-api-access-rwlbt") pod "6f350a81-9915-416a-9b62-fcd5b0961d27" (UID: "6f350a81-9915-416a-9b62-fcd5b0961d27"). InnerVolumeSpecName "kube-api-access-rwlbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.605261 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f350a81-9915-416a-9b62-fcd5b0961d27" (UID: "6f350a81-9915-416a-9b62-fcd5b0961d27"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.637368 4634 generic.go:334] "Generic (PLEG): container finished" podID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerID="48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63" exitCode=0 Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.637418 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2b9w" event={"ID":"6f350a81-9915-416a-9b62-fcd5b0961d27","Type":"ContainerDied","Data":"48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63"} Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.637454 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m2b9w" event={"ID":"6f350a81-9915-416a-9b62-fcd5b0961d27","Type":"ContainerDied","Data":"5c2b1468048dd6566a52bf0a8884d3d35ffcd6bc6e7580879bc5e017b7d1ee91"} Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.637475 4634 scope.go:117] "RemoveContainer" containerID="48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.637634 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m2b9w" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.687445 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f350a81-9915-416a-9b62-fcd5b0961d27-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.687478 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwlbt\" (UniqueName: \"kubernetes.io/projected/6f350a81-9915-416a-9b62-fcd5b0961d27-kube-api-access-rwlbt\") on node \"crc\" DevicePath \"\"" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.696177 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2b9w"] Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.703501 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m2b9w"] Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.704763 4634 scope.go:117] "RemoveContainer" containerID="f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.796484 4634 scope.go:117] "RemoveContainer" containerID="8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.865619 4634 scope.go:117] "RemoveContainer" containerID="48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63" Sep 29 14:48:11 crc kubenswrapper[4634]: E0929 14:48:10.866337 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63\": container with ID starting with 48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63 not found: ID does not exist" containerID="48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.866377 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63"} err="failed to get container status 
\"48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63\": rpc error: code = NotFound desc = could not find container \"48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63\": container with ID starting with 48ab79ba17f2a2363bde56306a04f38996d33803695f122090c38680ef668e63 not found: ID does not exist" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.866405 4634 scope.go:117] "RemoveContainer" containerID="f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a" Sep 29 14:48:11 crc kubenswrapper[4634]: E0929 14:48:10.874262 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a\": container with ID starting with f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a not found: ID does not exist" containerID="f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.874316 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a"} err="failed to get container status \"f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a\": rpc error: code = NotFound desc = could not find container \"f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a\": container with ID starting with f5d19a88959db23af02aebdedb95b90278bacc8d40a550e99a47b8e688e7739a not found: ID does not exist" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.874350 4634 scope.go:117] "RemoveContainer" containerID="8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0" Sep 29 14:48:11 crc kubenswrapper[4634]: E0929 14:48:10.875660 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0\": container with ID starting with 8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0 not found: ID does not exist" containerID="8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:10.875684 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0"} err="failed to get container status \"8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0\": rpc error: code = NotFound desc = could not find container \"8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0\": container with ID starting with 8065ef68a4cd962f0cd30b92a1d50b30d69dad2486c1ad65be287a9ce92ee0d0 not found: ID does not exist" Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:11.525193 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w4xqx"] Sep 29 14:48:11 crc kubenswrapper[4634]: I0929 14:48:11.665886 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4xqx" event={"ID":"0d29b02a-75cf-4064-b063-4705c99544e7","Type":"ContainerStarted","Data":"8df39db495fe5b10bf8a5e0e13bbc7a2c70cb7067219df835e86a8b0dc2584c4"} Sep 29 14:48:12 crc kubenswrapper[4634]: I0929 14:48:12.121620 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" path="/var/lib/kubelet/pods/6f350a81-9915-416a-9b62-fcd5b0961d27/volumes" Sep 29 
14:48:12 crc kubenswrapper[4634]: I0929 14:48:12.678011 4634 generic.go:334] "Generic (PLEG): container finished" podID="0d29b02a-75cf-4064-b063-4705c99544e7" containerID="4be616a28c4f27c1934b2fc045358e41000abdd5e758405db5df9e9043638a32" exitCode=0 Sep 29 14:48:12 crc kubenswrapper[4634]: I0929 14:48:12.679968 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4xqx" event={"ID":"0d29b02a-75cf-4064-b063-4705c99544e7","Type":"ContainerDied","Data":"4be616a28c4f27c1934b2fc045358e41000abdd5e758405db5df9e9043638a32"} Sep 29 14:48:20 crc kubenswrapper[4634]: I0929 14:48:20.769745 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4xqx" event={"ID":"0d29b02a-75cf-4064-b063-4705c99544e7","Type":"ContainerStarted","Data":"549e58d8adcb73e758e62a6b37b138f2ec01408ab9dc21308ae0a482b3ce3f2d"} Sep 29 14:48:21 crc kubenswrapper[4634]: I0929 14:48:21.781999 4634 generic.go:334] "Generic (PLEG): container finished" podID="0d29b02a-75cf-4064-b063-4705c99544e7" containerID="549e58d8adcb73e758e62a6b37b138f2ec01408ab9dc21308ae0a482b3ce3f2d" exitCode=0 Sep 29 14:48:21 crc kubenswrapper[4634]: I0929 14:48:21.782076 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4xqx" event={"ID":"0d29b02a-75cf-4064-b063-4705c99544e7","Type":"ContainerDied","Data":"549e58d8adcb73e758e62a6b37b138f2ec01408ab9dc21308ae0a482b3ce3f2d"} Sep 29 14:48:22 crc kubenswrapper[4634]: I0929 14:48:22.792657 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-w4xqx" event={"ID":"0d29b02a-75cf-4064-b063-4705c99544e7","Type":"ContainerStarted","Data":"61d6925d8caf5d94b7b3763c833343df18d9db713ca65c2f23f14fdf79ecf3a2"} Sep 29 14:48:22 crc kubenswrapper[4634]: I0929 14:48:22.818196 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-w4xqx" podStartSLOduration=4.276644549 podStartE2EDuration="13.81817199s" podCreationTimestamp="2025-09-29 14:48:09 +0000 UTC" firstStartedPulling="2025-09-29 14:48:12.681951273 +0000 UTC m=+3823.250679022" lastFinishedPulling="2025-09-29 14:48:22.223478674 +0000 UTC m=+3832.792206463" observedRunningTime="2025-09-29 14:48:22.810018208 +0000 UTC m=+3833.378745967" watchObservedRunningTime="2025-09-29 14:48:22.81817199 +0000 UTC m=+3833.386899749" Sep 29 14:48:25 crc kubenswrapper[4634]: I0929 14:48:25.110932 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:48:25 crc kubenswrapper[4634]: E0929 14:48:25.111798 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:48:30 crc kubenswrapper[4634]: I0929 14:48:30.252575 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:30 crc kubenswrapper[4634]: I0929 14:48:30.253215 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:30 crc kubenswrapper[4634]: I0929 14:48:30.304852 4634 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:30 crc kubenswrapper[4634]: I0929 14:48:30.932998 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-w4xqx" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.020861 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-w4xqx"] Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.068477 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fqr45"] Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.069230 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fqr45" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="registry-server" containerID="cri-o://eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5" gracePeriod=2 Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.699144 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.819997 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-catalog-content\") pod \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.820325 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjhq8\" (UniqueName: \"kubernetes.io/projected/dd668355-50fc-4cb3-bc77-fdfc56f545b0-kube-api-access-kjhq8\") pod \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.820487 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-utilities\") pod \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\" (UID: \"dd668355-50fc-4cb3-bc77-fdfc56f545b0\") " Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.824333 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-utilities" (OuterVolumeSpecName: "utilities") pod "dd668355-50fc-4cb3-bc77-fdfc56f545b0" (UID: "dd668355-50fc-4cb3-bc77-fdfc56f545b0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.841177 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd668355-50fc-4cb3-bc77-fdfc56f545b0-kube-api-access-kjhq8" (OuterVolumeSpecName: "kube-api-access-kjhq8") pod "dd668355-50fc-4cb3-bc77-fdfc56f545b0" (UID: "dd668355-50fc-4cb3-bc77-fdfc56f545b0"). InnerVolumeSpecName "kube-api-access-kjhq8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.905331 4634 generic.go:334] "Generic (PLEG): container finished" podID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerID="eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5" exitCode=0 Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.905537 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqr45" event={"ID":"dd668355-50fc-4cb3-bc77-fdfc56f545b0","Type":"ContainerDied","Data":"eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5"} Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.905721 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fqr45" event={"ID":"dd668355-50fc-4cb3-bc77-fdfc56f545b0","Type":"ContainerDied","Data":"0502c48920d33c30050cb7acb5554846187c47024b305c8db4d90bee7547a61d"} Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.905746 4634 scope.go:117] "RemoveContainer" containerID="eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.905643 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fqr45" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.927686 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.927718 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjhq8\" (UniqueName: \"kubernetes.io/projected/dd668355-50fc-4cb3-bc77-fdfc56f545b0-kube-api-access-kjhq8\") on node \"crc\" DevicePath \"\"" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.939293 4634 scope.go:117] "RemoveContainer" containerID="4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.941293 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd668355-50fc-4cb3-bc77-fdfc56f545b0" (UID: "dd668355-50fc-4cb3-bc77-fdfc56f545b0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:48:31 crc kubenswrapper[4634]: I0929 14:48:31.985927 4634 scope.go:117] "RemoveContainer" containerID="d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.016605 4634 scope.go:117] "RemoveContainer" containerID="eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5" Sep 29 14:48:32 crc kubenswrapper[4634]: E0929 14:48:32.017099 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5\": container with ID starting with eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5 not found: ID does not exist" containerID="eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.017143 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5"} err="failed to get container status \"eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5\": rpc error: code = NotFound desc = could not find container \"eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5\": container with ID starting with eeb4f0310e815f0bbf4e76b949f138211146f5034f8bb0eb444d28e5a808eda5 not found: ID does not exist" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.017182 4634 scope.go:117] "RemoveContainer" containerID="4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60" Sep 29 14:48:32 crc kubenswrapper[4634]: E0929 14:48:32.017455 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60\": container with ID starting with 4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60 not found: ID does not exist" containerID="4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.017493 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60"} err="failed to get container status \"4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60\": rpc error: code = NotFound desc = could not find container \"4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60\": container with ID starting with 4288aedc063353ce9744d8331f26889e24960e8bbdea89daf2ed1ff88c60ca60 not found: ID does not exist" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.019367 4634 scope.go:117] "RemoveContainer" containerID="d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9" Sep 29 14:48:32 crc kubenswrapper[4634]: E0929 14:48:32.019772 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9\": container with ID starting with d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9 not found: ID does not exist" containerID="d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.019878 4634 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9"} err="failed to get container status \"d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9\": rpc error: code = NotFound desc = could not find container \"d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9\": container with ID starting with d13fcc4de3a866e6fdbc340c5f8ebb093cb0a200370130ead11b6ccea8e62ba9 not found: ID does not exist" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.030018 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd668355-50fc-4cb3-bc77-fdfc56f545b0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.236496 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fqr45"] Sep 29 14:48:32 crc kubenswrapper[4634]: I0929 14:48:32.246397 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fqr45"] Sep 29 14:48:34 crc kubenswrapper[4634]: I0929 14:48:34.120709 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" path="/var/lib/kubelet/pods/dd668355-50fc-4cb3-bc77-fdfc56f545b0/volumes" Sep 29 14:48:37 crc kubenswrapper[4634]: I0929 14:48:37.111319 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:48:37 crc kubenswrapper[4634]: E0929 14:48:37.112539 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.169750 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-88vg9"] Sep 29 14:48:47 crc kubenswrapper[4634]: E0929 14:48:47.170943 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="extract-utilities" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.170960 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="extract-utilities" Sep 29 14:48:47 crc kubenswrapper[4634]: E0929 14:48:47.171005 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="registry-server" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.171016 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="registry-server" Sep 29 14:48:47 crc kubenswrapper[4634]: E0929 14:48:47.171033 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerName="extract-utilities" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.171048 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerName="extract-utilities" Sep 29 14:48:47 crc kubenswrapper[4634]: E0929 14:48:47.171080 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" 
containerName="registry-server" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.171113 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerName="registry-server" Sep 29 14:48:47 crc kubenswrapper[4634]: E0929 14:48:47.171132 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="extract-content" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.171141 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="extract-content" Sep 29 14:48:47 crc kubenswrapper[4634]: E0929 14:48:47.171158 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerName="extract-content" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.171167 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerName="extract-content" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.171402 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f350a81-9915-416a-9b62-fcd5b0961d27" containerName="registry-server" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.171447 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd668355-50fc-4cb3-bc77-fdfc56f545b0" containerName="registry-server" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.173291 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.189043 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88vg9"] Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.355731 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-catalog-content\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.355846 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqs7m\" (UniqueName: \"kubernetes.io/projected/61895d97-e2fc-4765-96cb-1f15c5f89825-kube-api-access-bqs7m\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.355914 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-utilities\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.458414 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-catalog-content\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.458484 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-bqs7m\" (UniqueName: \"kubernetes.io/projected/61895d97-e2fc-4765-96cb-1f15c5f89825-kube-api-access-bqs7m\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.458523 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-utilities\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.460464 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-utilities\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.461774 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-catalog-content\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.479825 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqs7m\" (UniqueName: \"kubernetes.io/projected/61895d97-e2fc-4765-96cb-1f15c5f89825-kube-api-access-bqs7m\") pod \"redhat-operators-88vg9\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:47 crc kubenswrapper[4634]: I0929 14:48:47.503012 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:48 crc kubenswrapper[4634]: I0929 14:48:48.057513 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88vg9"] Sep 29 14:48:48 crc kubenswrapper[4634]: I0929 14:48:48.065750 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88vg9" event={"ID":"61895d97-e2fc-4765-96cb-1f15c5f89825","Type":"ContainerStarted","Data":"dccdfb158b4a2e752224ebadedb391adb084a3c4ac9ba32229251a47454757fd"} Sep 29 14:48:49 crc kubenswrapper[4634]: I0929 14:48:49.077160 4634 generic.go:334] "Generic (PLEG): container finished" podID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerID="9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a" exitCode=0 Sep 29 14:48:49 crc kubenswrapper[4634]: I0929 14:48:49.077430 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88vg9" event={"ID":"61895d97-e2fc-4765-96cb-1f15c5f89825","Type":"ContainerDied","Data":"9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a"} Sep 29 14:48:50 crc kubenswrapper[4634]: I0929 14:48:50.144893 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:48:50 crc kubenswrapper[4634]: E0929 14:48:50.146570 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:48:51 crc kubenswrapper[4634]: I0929 14:48:51.103349 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88vg9" event={"ID":"61895d97-e2fc-4765-96cb-1f15c5f89825","Type":"ContainerStarted","Data":"f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86"} Sep 29 14:48:53 crc kubenswrapper[4634]: I0929 14:48:53.128923 4634 generic.go:334] "Generic (PLEG): container finished" podID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerID="f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86" exitCode=0 Sep 29 14:48:53 crc kubenswrapper[4634]: I0929 14:48:53.129003 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88vg9" event={"ID":"61895d97-e2fc-4765-96cb-1f15c5f89825","Type":"ContainerDied","Data":"f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86"} Sep 29 14:48:54 crc kubenswrapper[4634]: I0929 14:48:54.140697 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88vg9" event={"ID":"61895d97-e2fc-4765-96cb-1f15c5f89825","Type":"ContainerStarted","Data":"5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2"} Sep 29 14:48:54 crc kubenswrapper[4634]: I0929 14:48:54.169833 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-88vg9" podStartSLOduration=2.515400606 podStartE2EDuration="7.169814704s" podCreationTimestamp="2025-09-29 14:48:47 +0000 UTC" firstStartedPulling="2025-09-29 14:48:49.079408703 +0000 UTC m=+3859.648136452" lastFinishedPulling="2025-09-29 14:48:53.733822801 +0000 UTC m=+3864.302550550" observedRunningTime="2025-09-29 14:48:54.164728355 +0000 
UTC m=+3864.733456104" watchObservedRunningTime="2025-09-29 14:48:54.169814704 +0000 UTC m=+3864.738542454" Sep 29 14:48:57 crc kubenswrapper[4634]: I0929 14:48:57.503543 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:57 crc kubenswrapper[4634]: I0929 14:48:57.506637 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:48:58 crc kubenswrapper[4634]: I0929 14:48:58.585478 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-88vg9" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="registry-server" probeResult="failure" output=< Sep 29 14:48:58 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:48:58 crc kubenswrapper[4634]: > Sep 29 14:49:04 crc kubenswrapper[4634]: I0929 14:49:04.110380 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:49:04 crc kubenswrapper[4634]: E0929 14:49:04.111318 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:49:07 crc kubenswrapper[4634]: I0929 14:49:07.555292 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:49:07 crc kubenswrapper[4634]: I0929 14:49:07.720234 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:49:07 crc kubenswrapper[4634]: I0929 14:49:07.796636 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88vg9"] Sep 29 14:49:09 crc kubenswrapper[4634]: I0929 14:49:09.298133 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-88vg9" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="registry-server" containerID="cri-o://5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2" gracePeriod=2 Sep 29 14:49:09 crc kubenswrapper[4634]: I0929 14:49:09.947478 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.137402 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-utilities\") pod \"61895d97-e2fc-4765-96cb-1f15c5f89825\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.137872 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqs7m\" (UniqueName: \"kubernetes.io/projected/61895d97-e2fc-4765-96cb-1f15c5f89825-kube-api-access-bqs7m\") pod \"61895d97-e2fc-4765-96cb-1f15c5f89825\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.137990 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-catalog-content\") pod \"61895d97-e2fc-4765-96cb-1f15c5f89825\" (UID: \"61895d97-e2fc-4765-96cb-1f15c5f89825\") " Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.138444 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-utilities" (OuterVolumeSpecName: "utilities") pod "61895d97-e2fc-4765-96cb-1f15c5f89825" (UID: "61895d97-e2fc-4765-96cb-1f15c5f89825"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.138698 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.144970 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61895d97-e2fc-4765-96cb-1f15c5f89825-kube-api-access-bqs7m" (OuterVolumeSpecName: "kube-api-access-bqs7m") pod "61895d97-e2fc-4765-96cb-1f15c5f89825" (UID: "61895d97-e2fc-4765-96cb-1f15c5f89825"). InnerVolumeSpecName "kube-api-access-bqs7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.225416 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61895d97-e2fc-4765-96cb-1f15c5f89825" (UID: "61895d97-e2fc-4765-96cb-1f15c5f89825"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.239852 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqs7m\" (UniqueName: \"kubernetes.io/projected/61895d97-e2fc-4765-96cb-1f15c5f89825-kube-api-access-bqs7m\") on node \"crc\" DevicePath \"\"" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.239886 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61895d97-e2fc-4765-96cb-1f15c5f89825-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.313600 4634 generic.go:334] "Generic (PLEG): container finished" podID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerID="5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2" exitCode=0 Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.313643 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88vg9" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.313648 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88vg9" event={"ID":"61895d97-e2fc-4765-96cb-1f15c5f89825","Type":"ContainerDied","Data":"5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2"} Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.313708 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88vg9" event={"ID":"61895d97-e2fc-4765-96cb-1f15c5f89825","Type":"ContainerDied","Data":"dccdfb158b4a2e752224ebadedb391adb084a3c4ac9ba32229251a47454757fd"} Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.313734 4634 scope.go:117] "RemoveContainer" containerID="5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.354448 4634 scope.go:117] "RemoveContainer" containerID="f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.359319 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88vg9"] Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.368168 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-88vg9"] Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.383430 4634 scope.go:117] "RemoveContainer" containerID="9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.444045 4634 scope.go:117] "RemoveContainer" containerID="5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2" Sep 29 14:49:10 crc kubenswrapper[4634]: E0929 14:49:10.444596 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2\": container with ID starting with 5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2 not found: ID does not exist" containerID="5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.444660 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2"} err="failed to get container status \"5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2\": 
rpc error: code = NotFound desc = could not find container \"5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2\": container with ID starting with 5b226ccb6e2bac85439db1362f0ef3e8ae137e234e189904aaf681ec8c03fdc2 not found: ID does not exist" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.444707 4634 scope.go:117] "RemoveContainer" containerID="f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86" Sep 29 14:49:10 crc kubenswrapper[4634]: E0929 14:49:10.445306 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86\": container with ID starting with f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86 not found: ID does not exist" containerID="f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.445374 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86"} err="failed to get container status \"f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86\": rpc error: code = NotFound desc = could not find container \"f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86\": container with ID starting with f086bff22acdf47ab8f8fd982aa639ddd20f840560b50c2909b7f49a8dc14b86 not found: ID does not exist" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.445417 4634 scope.go:117] "RemoveContainer" containerID="9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a" Sep 29 14:49:10 crc kubenswrapper[4634]: E0929 14:49:10.445920 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a\": container with ID starting with 9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a not found: ID does not exist" containerID="9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a" Sep 29 14:49:10 crc kubenswrapper[4634]: I0929 14:49:10.445958 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a"} err="failed to get container status \"9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a\": rpc error: code = NotFound desc = could not find container \"9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a\": container with ID starting with 9878f98bb70210cd325a4f208657565fcaa8ebe70134d0353a8089e2b852238a not found: ID does not exist" Sep 29 14:49:12 crc kubenswrapper[4634]: I0929 14:49:12.125982 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" path="/var/lib/kubelet/pods/61895d97-e2fc-4765-96cb-1f15c5f89825/volumes" Sep 29 14:49:18 crc kubenswrapper[4634]: I0929 14:49:18.110388 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:49:18 crc kubenswrapper[4634]: E0929 14:49:18.111334 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:49:32 crc kubenswrapper[4634]: I0929 14:49:32.110385 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:49:32 crc kubenswrapper[4634]: E0929 14:49:32.111403 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:49:47 crc kubenswrapper[4634]: I0929 14:49:47.110521 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:49:47 crc kubenswrapper[4634]: I0929 14:49:47.732293 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"23b7656fc59940514ac9a5e95adaea7f99f55e2762c82f9bc547359a65645004"} Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.037420 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rtjc5"] Sep 29 14:51:02 crc kubenswrapper[4634]: E0929 14:51:02.040588 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="extract-utilities" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.040611 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="extract-utilities" Sep 29 14:51:02 crc kubenswrapper[4634]: E0929 14:51:02.040651 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="registry-server" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.040660 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="registry-server" Sep 29 14:51:02 crc kubenswrapper[4634]: E0929 14:51:02.040682 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="extract-content" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.040692 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="extract-content" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.040976 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="61895d97-e2fc-4765-96cb-1f15c5f89825" containerName="registry-server" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.043135 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.065041 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rtjc5"] Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.161717 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hg2s\" (UniqueName: \"kubernetes.io/projected/67dc5d17-d502-48f2-bd2b-b7423b168ee0-kube-api-access-8hg2s\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.161889 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-catalog-content\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.161980 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-utilities\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.263867 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-utilities\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.263963 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hg2s\" (UniqueName: \"kubernetes.io/projected/67dc5d17-d502-48f2-bd2b-b7423b168ee0-kube-api-access-8hg2s\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.264093 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-catalog-content\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.264522 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-utilities\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.264633 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-catalog-content\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.300767 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8hg2s\" (UniqueName: \"kubernetes.io/projected/67dc5d17-d502-48f2-bd2b-b7423b168ee0-kube-api-access-8hg2s\") pod \"community-operators-rtjc5\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:02 crc kubenswrapper[4634]: I0929 14:51:02.372447 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:03 crc kubenswrapper[4634]: I0929 14:51:03.032618 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rtjc5"] Sep 29 14:51:03 crc kubenswrapper[4634]: I0929 14:51:03.613235 4634 generic.go:334] "Generic (PLEG): container finished" podID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerID="b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e" exitCode=0 Sep 29 14:51:03 crc kubenswrapper[4634]: I0929 14:51:03.613388 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtjc5" event={"ID":"67dc5d17-d502-48f2-bd2b-b7423b168ee0","Type":"ContainerDied","Data":"b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e"} Sep 29 14:51:03 crc kubenswrapper[4634]: I0929 14:51:03.613737 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtjc5" event={"ID":"67dc5d17-d502-48f2-bd2b-b7423b168ee0","Type":"ContainerStarted","Data":"89c9e73cad3a00715d91ee0a38483c4118854bf2514dcb10a383e8ab2673cf5d"} Sep 29 14:51:04 crc kubenswrapper[4634]: I0929 14:51:04.623972 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtjc5" event={"ID":"67dc5d17-d502-48f2-bd2b-b7423b168ee0","Type":"ContainerStarted","Data":"2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814"} Sep 29 14:51:06 crc kubenswrapper[4634]: I0929 14:51:06.647165 4634 generic.go:334] "Generic (PLEG): container finished" podID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerID="2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814" exitCode=0 Sep 29 14:51:06 crc kubenswrapper[4634]: I0929 14:51:06.647253 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtjc5" event={"ID":"67dc5d17-d502-48f2-bd2b-b7423b168ee0","Type":"ContainerDied","Data":"2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814"} Sep 29 14:51:07 crc kubenswrapper[4634]: I0929 14:51:07.658134 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtjc5" event={"ID":"67dc5d17-d502-48f2-bd2b-b7423b168ee0","Type":"ContainerStarted","Data":"29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e"} Sep 29 14:51:07 crc kubenswrapper[4634]: I0929 14:51:07.687607 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rtjc5" podStartSLOduration=2.232957623 podStartE2EDuration="5.687580283s" podCreationTimestamp="2025-09-29 14:51:02 +0000 UTC" firstStartedPulling="2025-09-29 14:51:03.61815234 +0000 UTC m=+3994.186880099" lastFinishedPulling="2025-09-29 14:51:07.07277501 +0000 UTC m=+3997.641502759" observedRunningTime="2025-09-29 14:51:07.681687602 +0000 UTC m=+3998.250415361" watchObservedRunningTime="2025-09-29 14:51:07.687580283 +0000 UTC m=+3998.256308032" Sep 29 14:51:12 crc kubenswrapper[4634]: I0929 14:51:12.373581 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:12 crc kubenswrapper[4634]: I0929 14:51:12.374244 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:12 crc kubenswrapper[4634]: I0929 14:51:12.453472 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:12 crc kubenswrapper[4634]: I0929 14:51:12.755189 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:12 crc kubenswrapper[4634]: I0929 14:51:12.803652 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rtjc5"] Sep 29 14:51:14 crc kubenswrapper[4634]: I0929 14:51:14.727871 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rtjc5" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="registry-server" containerID="cri-o://29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e" gracePeriod=2 Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.306496 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.389451 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hg2s\" (UniqueName: \"kubernetes.io/projected/67dc5d17-d502-48f2-bd2b-b7423b168ee0-kube-api-access-8hg2s\") pod \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.389649 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-utilities\") pod \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.389789 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-catalog-content\") pod \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\" (UID: \"67dc5d17-d502-48f2-bd2b-b7423b168ee0\") " Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.391795 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-utilities" (OuterVolumeSpecName: "utilities") pod "67dc5d17-d502-48f2-bd2b-b7423b168ee0" (UID: "67dc5d17-d502-48f2-bd2b-b7423b168ee0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.400349 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67dc5d17-d502-48f2-bd2b-b7423b168ee0-kube-api-access-8hg2s" (OuterVolumeSpecName: "kube-api-access-8hg2s") pod "67dc5d17-d502-48f2-bd2b-b7423b168ee0" (UID: "67dc5d17-d502-48f2-bd2b-b7423b168ee0"). InnerVolumeSpecName "kube-api-access-8hg2s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.447914 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67dc5d17-d502-48f2-bd2b-b7423b168ee0" (UID: "67dc5d17-d502-48f2-bd2b-b7423b168ee0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.492641 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hg2s\" (UniqueName: \"kubernetes.io/projected/67dc5d17-d502-48f2-bd2b-b7423b168ee0-kube-api-access-8hg2s\") on node \"crc\" DevicePath \"\"" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.492673 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.492683 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67dc5d17-d502-48f2-bd2b-b7423b168ee0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.741588 4634 generic.go:334] "Generic (PLEG): container finished" podID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerID="29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e" exitCode=0 Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.741607 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rtjc5" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.741628 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtjc5" event={"ID":"67dc5d17-d502-48f2-bd2b-b7423b168ee0","Type":"ContainerDied","Data":"29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e"} Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.742258 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rtjc5" event={"ID":"67dc5d17-d502-48f2-bd2b-b7423b168ee0","Type":"ContainerDied","Data":"89c9e73cad3a00715d91ee0a38483c4118854bf2514dcb10a383e8ab2673cf5d"} Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.742298 4634 scope.go:117] "RemoveContainer" containerID="29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.800821 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rtjc5"] Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.807445 4634 scope.go:117] "RemoveContainer" containerID="2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.813363 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rtjc5"] Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.850766 4634 scope.go:117] "RemoveContainer" containerID="b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.900510 4634 scope.go:117] "RemoveContainer" containerID="29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e" Sep 29 14:51:15 crc kubenswrapper[4634]: E0929 14:51:15.901417 4634 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e\": container with ID starting with 29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e not found: ID does not exist" containerID="29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.901454 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e"} err="failed to get container status \"29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e\": rpc error: code = NotFound desc = could not find container \"29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e\": container with ID starting with 29c3b5a94a4454de499cc3c38ab82f23103e3b899a578d120a0c4fe7b189a70e not found: ID does not exist" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.901482 4634 scope.go:117] "RemoveContainer" containerID="2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814" Sep 29 14:51:15 crc kubenswrapper[4634]: E0929 14:51:15.904378 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814\": container with ID starting with 2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814 not found: ID does not exist" containerID="2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.904419 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814"} err="failed to get container status \"2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814\": rpc error: code = NotFound desc = could not find container \"2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814\": container with ID starting with 2b0eddbd55cebabddf10d261423d1ce09b7eacfb996d4f89246118dac5d6f814 not found: ID does not exist" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.904440 4634 scope.go:117] "RemoveContainer" containerID="b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e" Sep 29 14:51:15 crc kubenswrapper[4634]: E0929 14:51:15.907044 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e\": container with ID starting with b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e not found: ID does not exist" containerID="b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e" Sep 29 14:51:15 crc kubenswrapper[4634]: I0929 14:51:15.907071 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e"} err="failed to get container status \"b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e\": rpc error: code = NotFound desc = could not find container \"b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e\": container with ID starting with b0d10cee69d7241e95687b190e2ab91cd4f6453cb2241c7f09892b2a4c83bf4e not found: ID does not exist" Sep 29 14:51:16 crc kubenswrapper[4634]: I0929 14:51:16.148729 4634 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" path="/var/lib/kubelet/pods/67dc5d17-d502-48f2-bd2b-b7423b168ee0/volumes" Sep 29 14:52:14 crc kubenswrapper[4634]: I0929 14:52:14.396607 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:52:14 crc kubenswrapper[4634]: I0929 14:52:14.397440 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:52:44 crc kubenswrapper[4634]: I0929 14:52:44.395595 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:52:44 crc kubenswrapper[4634]: I0929 14:52:44.396453 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:53:14 crc kubenswrapper[4634]: I0929 14:53:14.396809 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:53:14 crc kubenswrapper[4634]: I0929 14:53:14.397651 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:53:14 crc kubenswrapper[4634]: I0929 14:53:14.397719 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 14:53:14 crc kubenswrapper[4634]: I0929 14:53:14.398779 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"23b7656fc59940514ac9a5e95adaea7f99f55e2762c82f9bc547359a65645004"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 14:53:14 crc kubenswrapper[4634]: I0929 14:53:14.398877 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://23b7656fc59940514ac9a5e95adaea7f99f55e2762c82f9bc547359a65645004" gracePeriod=600 Sep 29 14:53:15 crc kubenswrapper[4634]: I0929 14:53:15.059196 4634 generic.go:334] "Generic (PLEG): container finished" 
podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="23b7656fc59940514ac9a5e95adaea7f99f55e2762c82f9bc547359a65645004" exitCode=0 Sep 29 14:53:15 crc kubenswrapper[4634]: I0929 14:53:15.059526 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"23b7656fc59940514ac9a5e95adaea7f99f55e2762c82f9bc547359a65645004"} Sep 29 14:53:15 crc kubenswrapper[4634]: I0929 14:53:15.059626 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"} Sep 29 14:53:15 crc kubenswrapper[4634]: I0929 14:53:15.059641 4634 scope.go:117] "RemoveContainer" containerID="75b1f565ba12c2a576e515c40f14ea8b2818dc2cae27efbf467d4bbf43679fbf" Sep 29 14:55:14 crc kubenswrapper[4634]: I0929 14:55:14.396293 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 14:55:14 crc kubenswrapper[4634]: I0929 14:55:14.397202 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 14:55:25 crc kubenswrapper[4634]: I0929 14:55:25.559188 4634 generic.go:334] "Generic (PLEG): container finished" podID="1753ec8d-9af3-4930-a9d7-88b1c2f440cb" containerID="d96546c07ac1777ef3c55c2ed0b430cd98d4b0c000d53e49da0c095fcbacc66c" exitCode=0 Sep 29 14:55:25 crc kubenswrapper[4634]: I0929 14:55:25.559756 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"1753ec8d-9af3-4930-a9d7-88b1c2f440cb","Type":"ContainerDied","Data":"d96546c07ac1777ef3c55c2ed0b430cd98d4b0c000d53e49da0c095fcbacc66c"} Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.041006 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.144701 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ca-certs\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.144981 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.145018 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config-secret\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.145053 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.145090 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xknrd\" (UniqueName: \"kubernetes.io/projected/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-kube-api-access-xknrd\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.145152 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-config-data\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.145261 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-workdir\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.145290 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ssh-key\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.145327 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-temporary\") pod \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\" (UID: \"1753ec8d-9af3-4930-a9d7-88b1c2f440cb\") " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.148385 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-temporary" (OuterVolumeSpecName: 
"test-operator-ephemeral-temporary") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.154512 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-config-data" (OuterVolumeSpecName: "config-data") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.158674 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-kube-api-access-xknrd" (OuterVolumeSpecName: "kube-api-access-xknrd") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "kube-api-access-xknrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.160100 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.175853 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.181568 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.200517 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.203939 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.232861 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "1753ec8d-9af3-4930-a9d7-88b1c2f440cb" (UID: "1753ec8d-9af3-4930-a9d7-88b1c2f440cb"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.247833 4634 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.247876 4634 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.247893 4634 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.247905 4634 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.260807 4634 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.260856 4634 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.260875 4634 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.260892 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xknrd\" (UniqueName: \"kubernetes.io/projected/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-kube-api-access-xknrd\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.260905 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1753ec8d-9af3-4930-a9d7-88b1c2f440cb-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.286441 4634 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.363598 4634 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.582827 4634 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"1753ec8d-9af3-4930-a9d7-88b1c2f440cb","Type":"ContainerDied","Data":"507e18f4730c1dd478e6d7fb4cb14d218b3bc919645d4e5d98168d6d3be4f92f"} Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.582884 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="507e18f4730c1dd478e6d7fb4cb14d218b3bc919645d4e5d98168d6d3be4f92f" Sep 29 14:55:27 crc kubenswrapper[4634]: I0929 14:55:27.582966 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.326749 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 14:55:37 crc kubenswrapper[4634]: E0929 14:55:37.328234 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="extract-content" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.328253 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="extract-content" Sep 29 14:55:37 crc kubenswrapper[4634]: E0929 14:55:37.328280 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="registry-server" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.328290 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="registry-server" Sep 29 14:55:37 crc kubenswrapper[4634]: E0929 14:55:37.328308 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1753ec8d-9af3-4930-a9d7-88b1c2f440cb" containerName="tempest-tests-tempest-tests-runner" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.328317 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="1753ec8d-9af3-4930-a9d7-88b1c2f440cb" containerName="tempest-tests-tempest-tests-runner" Sep 29 14:55:37 crc kubenswrapper[4634]: E0929 14:55:37.328340 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="extract-utilities" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.328349 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="extract-utilities" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.328755 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="67dc5d17-d502-48f2-bd2b-b7423b168ee0" containerName="registry-server" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.328803 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="1753ec8d-9af3-4930-a9d7-88b1c2f440cb" containerName="tempest-tests-tempest-tests-runner" Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.329720 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.335032 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bh8f2"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.340677 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.519223 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cj2t\" (UniqueName: \"kubernetes.io/projected/8951f1a8-0969-4546-b683-b06ea036112c-kube-api-access-2cj2t\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8951f1a8-0969-4546-b683-b06ea036112c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.520061 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8951f1a8-0969-4546-b683-b06ea036112c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.622376 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8951f1a8-0969-4546-b683-b06ea036112c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.622528 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cj2t\" (UniqueName: \"kubernetes.io/projected/8951f1a8-0969-4546-b683-b06ea036112c-kube-api-access-2cj2t\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8951f1a8-0969-4546-b683-b06ea036112c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.635782 4634 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8951f1a8-0969-4546-b683-b06ea036112c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.648875 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cj2t\" (UniqueName: \"kubernetes.io/projected/8951f1a8-0969-4546-b683-b06ea036112c-kube-api-access-2cj2t\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8951f1a8-0969-4546-b683-b06ea036112c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.694591 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8951f1a8-0969-4546-b683-b06ea036112c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:37 crc kubenswrapper[4634]: I0929 14:55:37.981445 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 14:55:38 crc kubenswrapper[4634]: I0929 14:55:38.726438 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Sep 29 14:55:38 crc kubenswrapper[4634]: I0929 14:55:38.745852 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 14:55:39 crc kubenswrapper[4634]: I0929 14:55:39.751653 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"8951f1a8-0969-4546-b683-b06ea036112c","Type":"ContainerStarted","Data":"504060058cb4d86dda20ce7c30134eb30aef63ef66d30a86b6a7a39e1bd8b71c"}
Sep 29 14:55:40 crc kubenswrapper[4634]: I0929 14:55:40.767278 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"8951f1a8-0969-4546-b683-b06ea036112c","Type":"ContainerStarted","Data":"65669c897e7c2b3f8d382976e96ef2288fa0f93bb13047740924c99b2ee395d3"}
Sep 29 14:55:44 crc kubenswrapper[4634]: I0929 14:55:44.395917 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:55:44 crc kubenswrapper[4634]: I0929 14:55:44.397810 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.526455 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=18.241748583 podStartE2EDuration="19.526434494s" podCreationTimestamp="2025-09-29 14:55:37 +0000 UTC" firstStartedPulling="2025-09-29 14:55:38.745486057 +0000 UTC m=+4269.314213806" lastFinishedPulling="2025-09-29 14:55:40.030171958 +0000 UTC m=+4270.598899717" observedRunningTime="2025-09-29 14:55:40.793587019 +0000 UTC m=+4271.362314768" watchObservedRunningTime="2025-09-29 14:55:56.526434494 +0000 UTC m=+4287.095162243"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.531871 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mfrz5/must-gather-fh6d4"]
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.538286 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.544564 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mfrz5/must-gather-fh6d4"]
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.545075 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-mfrz5"/"default-dockercfg-dgcn9"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.545401 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mfrz5"/"openshift-service-ca.crt"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.545477 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mfrz5"/"kube-root-ca.crt"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.653862 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7qnm\" (UniqueName: \"kubernetes.io/projected/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-kube-api-access-h7qnm\") pod \"must-gather-fh6d4\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.653939 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-must-gather-output\") pod \"must-gather-fh6d4\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.756271 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7qnm\" (UniqueName: \"kubernetes.io/projected/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-kube-api-access-h7qnm\") pod \"must-gather-fh6d4\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.756350 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-must-gather-output\") pod \"must-gather-fh6d4\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.756915 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-must-gather-output\") pod \"must-gather-fh6d4\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:56 crc kubenswrapper[4634]: I0929 14:55:56.871818 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7qnm\" (UniqueName: \"kubernetes.io/projected/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-kube-api-access-h7qnm\") pod \"must-gather-fh6d4\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:57 crc kubenswrapper[4634]: I0929 14:55:57.160209 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/must-gather-fh6d4"
Sep 29 14:55:57 crc kubenswrapper[4634]: I0929 14:55:57.992115 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mfrz5/must-gather-fh6d4"]
Sep 29 14:55:58 crc kubenswrapper[4634]: I0929 14:55:58.994664 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" event={"ID":"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14","Type":"ContainerStarted","Data":"9c808b93eeebf1bf10a569624d7584942d6b7526e128c454bb4067d29266b686"}
Sep 29 14:56:05 crc kubenswrapper[4634]: I0929 14:56:05.068448 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" event={"ID":"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14","Type":"ContainerStarted","Data":"a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2"}
Sep 29 14:56:05 crc kubenswrapper[4634]: I0929 14:56:05.068972 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" event={"ID":"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14","Type":"ContainerStarted","Data":"b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484"}
Sep 29 14:56:05 crc kubenswrapper[4634]: I0929 14:56:05.090438 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" podStartSLOduration=3.431015697 podStartE2EDuration="9.090414119s" podCreationTimestamp="2025-09-29 14:55:56 +0000 UTC" firstStartedPulling="2025-09-29 14:55:58.001313148 +0000 UTC m=+4288.570040907" lastFinishedPulling="2025-09-29 14:56:03.66071158 +0000 UTC m=+4294.229439329" observedRunningTime="2025-09-29 14:56:05.084347975 +0000 UTC m=+4295.653075734" watchObservedRunningTime="2025-09-29 14:56:05.090414119 +0000 UTC m=+4295.659141868"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.237952 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-bz7df"]
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.240216 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.359923 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a994c0c1-48af-45d9-971d-3614484c0c43-host\") pod \"crc-debug-bz7df\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.360175 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmr2l\" (UniqueName: \"kubernetes.io/projected/a994c0c1-48af-45d9-971d-3614484c0c43-kube-api-access-kmr2l\") pod \"crc-debug-bz7df\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.462693 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a994c0c1-48af-45d9-971d-3614484c0c43-host\") pod \"crc-debug-bz7df\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.462822 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a994c0c1-48af-45d9-971d-3614484c0c43-host\") pod \"crc-debug-bz7df\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.462868 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmr2l\" (UniqueName: \"kubernetes.io/projected/a994c0c1-48af-45d9-971d-3614484c0c43-kube-api-access-kmr2l\") pod \"crc-debug-bz7df\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.487926 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmr2l\" (UniqueName: \"kubernetes.io/projected/a994c0c1-48af-45d9-971d-3614484c0c43-kube-api-access-kmr2l\") pod \"crc-debug-bz7df\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: I0929 14:56:11.564046 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bz7df"
Sep 29 14:56:11 crc kubenswrapper[4634]: W0929 14:56:11.605238 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda994c0c1_48af_45d9_971d_3614484c0c43.slice/crio-b86537b68d2cfb2c0f11970b20dfc2c80b015d7a3955431af453101c758b3e20 WatchSource:0}: Error finding container b86537b68d2cfb2c0f11970b20dfc2c80b015d7a3955431af453101c758b3e20: Status 404 returned error can't find the container with id b86537b68d2cfb2c0f11970b20dfc2c80b015d7a3955431af453101c758b3e20
Sep 29 14:56:12 crc kubenswrapper[4634]: I0929 14:56:12.146630 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-bz7df" event={"ID":"a994c0c1-48af-45d9-971d-3614484c0c43","Type":"ContainerStarted","Data":"b86537b68d2cfb2c0f11970b20dfc2c80b015d7a3955431af453101c758b3e20"}
Sep 29 14:56:14 crc kubenswrapper[4634]: I0929 14:56:14.396595 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 14:56:14 crc kubenswrapper[4634]: I0929 14:56:14.396968 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 14:56:14 crc kubenswrapper[4634]: I0929 14:56:14.397030 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4"
Sep 29 14:56:14 crc kubenswrapper[4634]: I0929 14:56:14.397874 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 14:56:14 crc kubenswrapper[4634]: I0929 14:56:14.397926 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" gracePeriod=600
Sep 29 14:56:14 crc kubenswrapper[4634]: E0929 14:56:14.533958 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:56:15 crc kubenswrapper[4634]: I0929 14:56:15.239782 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" exitCode=0
Sep 29 14:56:15 crc kubenswrapper[4634]: I0929 14:56:15.240050 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"}
Sep 29 14:56:15 crc kubenswrapper[4634]: I0929 14:56:15.240823 4634 scope.go:117] "RemoveContainer" containerID="23b7656fc59940514ac9a5e95adaea7f99f55e2762c82f9bc547359a65645004"
Sep 29 14:56:15 crc kubenswrapper[4634]: I0929 14:56:15.245969 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:56:15 crc kubenswrapper[4634]: E0929 14:56:15.247538 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:56:25 crc kubenswrapper[4634]: I0929 14:56:25.358915 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-bz7df" event={"ID":"a994c0c1-48af-45d9-971d-3614484c0c43","Type":"ContainerStarted","Data":"6aec0edd364710757820b78be0499b2b4c9a98895ba9136207d08d2045d2b8fb"}
Sep 29 14:56:25 crc kubenswrapper[4634]: I0929 14:56:25.386941 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mfrz5/crc-debug-bz7df" podStartSLOduration=1.459288848 podStartE2EDuration="14.386916992s" podCreationTimestamp="2025-09-29 14:56:11 +0000 UTC" firstStartedPulling="2025-09-29 14:56:11.607151434 +0000 UTC m=+4302.175879183" lastFinishedPulling="2025-09-29 14:56:24.534779588 +0000 UTC m=+4315.103507327" observedRunningTime="2025-09-29 14:56:25.374596609 +0000 UTC m=+4315.943324368" watchObservedRunningTime="2025-09-29 14:56:25.386916992 +0000 UTC m=+4315.955644741"
Sep 29 14:56:28 crc kubenswrapper[4634]: I0929 14:56:28.111359 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:56:28 crc kubenswrapper[4634]: E0929 14:56:28.112320 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:56:39 crc kubenswrapper[4634]: I0929 14:56:39.110275 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:56:39 crc kubenswrapper[4634]: E0929 14:56:39.111348 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:56:50 crc kubenswrapper[4634]: I0929 14:56:50.117693 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:56:50 crc kubenswrapper[4634]: E0929 14:56:50.122389 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:57:03 crc kubenswrapper[4634]: I0929 14:57:03.111723 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:57:03 crc kubenswrapper[4634]: E0929 14:57:03.113230 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:57:15 crc kubenswrapper[4634]: I0929 14:57:15.136573 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:57:15 crc kubenswrapper[4634]: E0929 14:57:15.137950 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:57:28 crc kubenswrapper[4634]: I0929 14:57:28.111129 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:57:28 crc kubenswrapper[4634]: E0929 14:57:28.112247 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:57:43 crc kubenswrapper[4634]: I0929 14:57:43.110346 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:57:43 crc kubenswrapper[4634]: E0929 14:57:43.111213 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:57:58 crc kubenswrapper[4634]: I0929 14:57:58.110280 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:57:58 crc kubenswrapper[4634]: E0929 14:57:58.110985 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:58:00 crc kubenswrapper[4634]: I0929 14:58:00.224562 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-77c768456b-27trs_4a923c46-c064-4dbd-b91d-cc1379e39d35/barbican-api/0.log"
Sep 29 14:58:00 crc kubenswrapper[4634]: I0929 14:58:00.240932 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-77c768456b-27trs_4a923c46-c064-4dbd-b91d-cc1379e39d35/barbican-api-log/0.log"
Sep 29 14:58:00 crc kubenswrapper[4634]: I0929 14:58:00.975541 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5658b5d69b-mlcxf_c1e86a12-7d7e-4bbe-bcf2-030f754a91a2/barbican-keystone-listener/0.log"
Sep 29 14:58:01 crc kubenswrapper[4634]: I0929 14:58:01.095719 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5658b5d69b-mlcxf_c1e86a12-7d7e-4bbe-bcf2-030f754a91a2/barbican-keystone-listener-log/0.log"
Sep 29 14:58:01 crc kubenswrapper[4634]: I0929 14:58:01.345942 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-645d46567c-ngxbx_c697d1f5-42d0-4a87-9704-64a6e1406db1/barbican-worker/0.log"
Sep 29 14:58:01 crc kubenswrapper[4634]: I0929 14:58:01.415270 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-645d46567c-ngxbx_c697d1f5-42d0-4a87-9704-64a6e1406db1/barbican-worker-log/0.log"
Sep 29 14:58:01 crc kubenswrapper[4634]: I0929 14:58:01.677580 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf_5e3a5bac-db09-4bee-bc1a-a93841ada5ed/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:02 crc kubenswrapper[4634]: I0929 14:58:02.039460 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/ceilometer-central-agent/0.log"
Sep 29 14:58:02 crc kubenswrapper[4634]: I0929 14:58:02.079016 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/ceilometer-notification-agent/0.log"
Sep 29 14:58:02 crc kubenswrapper[4634]: I0929 14:58:02.175963 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/proxy-httpd/0.log"
Sep 29 14:58:02 crc kubenswrapper[4634]: I0929 14:58:02.357615 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/sg-core/0.log"
Sep 29 14:58:02 crc kubenswrapper[4634]: I0929 14:58:02.550525 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8739a6eb-884d-49c7-8ff9-e44b56575552/cinder-api/0.log"
Sep 29 14:58:03 crc kubenswrapper[4634]: I0929 14:58:03.231143 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8739a6eb-884d-49c7-8ff9-e44b56575552/cinder-api-log/0.log"
Sep 29 14:58:03 crc kubenswrapper[4634]: I0929 14:58:03.337192 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_03850223-163a-4eca-a290-1d072a2b535d/cinder-scheduler/0.log"
Sep 29 14:58:03 crc kubenswrapper[4634]: I0929 14:58:03.647132 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_03850223-163a-4eca-a290-1d072a2b535d/probe/0.log"
Sep 29 14:58:03 crc kubenswrapper[4634]: I0929 14:58:03.701771 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc_d8fe0f99-6eea-49ca-bf34-fd88555c84ec/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:04 crc kubenswrapper[4634]: I0929 14:58:04.009705 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-v8shz_332f4970-1479-4efc-8b35-e1795111b1b4/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:04 crc kubenswrapper[4634]: I0929 14:58:04.254139 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-jbsbr_8d1a9c90-4eaf-4553-b80b-2d608c11af9a/init/0.log"
Sep 29 14:58:04 crc kubenswrapper[4634]: I0929 14:58:04.435784 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-jbsbr_8d1a9c90-4eaf-4553-b80b-2d608c11af9a/init/0.log"
Sep 29 14:58:04 crc kubenswrapper[4634]: I0929 14:58:04.593685 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2_268175fe-c76e-4032-8027-db49b1355ec7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:04 crc kubenswrapper[4634]: I0929 14:58:04.676915 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-jbsbr_8d1a9c90-4eaf-4553-b80b-2d608c11af9a/dnsmasq-dns/0.log"
Sep 29 14:58:04 crc kubenswrapper[4634]: I0929 14:58:04.878228 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_961f670c-7ab6-42b3-8fa1-b5494af46245/glance-log/0.log"
Sep 29 14:58:04 crc kubenswrapper[4634]: I0929 14:58:04.895733 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_961f670c-7ab6-42b3-8fa1-b5494af46245/glance-httpd/0.log"
Sep 29 14:58:05 crc kubenswrapper[4634]: I0929 14:58:05.050422 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3dc03219-407d-4010-9c0f-5bbf4d94da6a/glance-httpd/0.log"
Sep 29 14:58:05 crc kubenswrapper[4634]: I0929 14:58:05.158187 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3dc03219-407d-4010-9c0f-5bbf4d94da6a/glance-log/0.log"
Sep 29 14:58:05 crc kubenswrapper[4634]: I0929 14:58:05.363065 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5d5866c49b-9tt6g_24cc4bfc-123a-479d-afb7-ca6b62cd7754/horizon/0.log"
Sep 29 14:58:05 crc kubenswrapper[4634]: I0929 14:58:05.625660 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb_c2fcbf70-369d-41cd-8187-7e26848b9171/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:05 crc kubenswrapper[4634]: I0929 14:58:05.745459 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5d5866c49b-9tt6g_24cc4bfc-123a-479d-afb7-ca6b62cd7754/horizon-log/0.log"
Sep 29 14:58:05 crc kubenswrapper[4634]: I0929 14:58:05.847456 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-zs264_ca087dcb-6346-46ed-9750-b5548355305a/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:06 crc kubenswrapper[4634]: I0929 14:58:06.162116 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_d13e0663-c00d-4276-be1d-fc570182e28a/kube-state-metrics/0.log"
Sep 29 14:58:06 crc kubenswrapper[4634]: I0929 14:58:06.449311 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-587bf8586b-wjkjk_f725a01f-c382-4260-8e4e-e530d7c0ed82/keystone-api/0.log"
Sep 29 14:58:06 crc kubenswrapper[4634]: I0929 14:58:06.886333 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs_a38e3e32-cd47-4afd-aa38-da7911b1a12f/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:07 crc kubenswrapper[4634]: I0929 14:58:07.114396 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_76a2a736-1945-4e7f-955e-e5c33004d4df/memcached/0.log"
Sep 29 14:58:07 crc kubenswrapper[4634]: I0929 14:58:07.502585 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg_b183018d-383a-4d89-bb1f-d5c1f13404a9/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:07 crc kubenswrapper[4634]: I0929 14:58:07.708994 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7f758ffcf7-qsxtz_2e0453ac-a888-4906-8a1f-9ba9a0f797e0/neutron-httpd/0.log"
Sep 29 14:58:07 crc kubenswrapper[4634]: I0929 14:58:07.783243 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7f758ffcf7-qsxtz_2e0453ac-a888-4906-8a1f-9ba9a0f797e0/neutron-api/0.log"
Sep 29 14:58:08 crc kubenswrapper[4634]: I0929 14:58:08.586604 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ad87c2fd-79c4-4931-99d6-bec867ee637e/nova-cell0-conductor-conductor/0.log"
Sep 29 14:58:08 crc kubenswrapper[4634]: I0929 14:58:08.992768 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_c0eded61-0572-44d1-8d17-78191173c99f/nova-cell1-conductor-conductor/0.log"
Sep 29 14:58:09 crc kubenswrapper[4634]: I0929 14:58:09.355041 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3eef7d23-1a73-41a3-b80a-d0be5789f09d/nova-api-api/0.log"
Sep 29 14:58:09 crc kubenswrapper[4634]: I0929 14:58:09.422438 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_6bb80ea7-a2f1-4eeb-9205-722fda8a48b2/nova-cell1-novncproxy-novncproxy/0.log"
Sep 29 14:58:09 crc kubenswrapper[4634]: I0929 14:58:09.453128 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3eef7d23-1a73-41a3-b80a-d0be5789f09d/nova-api-log/0.log"
Sep 29 14:58:09 crc kubenswrapper[4634]: I0929 14:58:09.777501 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-x58xd_4dbe661a-c031-4716-9816-d5cb05957a35/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:09 crc kubenswrapper[4634]: I0929 14:58:09.799531 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6b03940d-5de0-4326-b16f-c436f6637a92/nova-metadata-log/0.log"
Sep 29 14:58:10 crc kubenswrapper[4634]: I0929 14:58:10.411424 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5dcf49a2-dd23-4b67-9f54-4659168f4f18/mysql-bootstrap/0.log"
Sep 29 14:58:10 crc kubenswrapper[4634]: I0929 14:58:10.647442 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_8fca2b6d-9b01-4498-bdc1-619d2b52d173/nova-scheduler-scheduler/0.log"
Sep 29 14:58:10 crc kubenswrapper[4634]: I0929 14:58:10.734140 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5dcf49a2-dd23-4b67-9f54-4659168f4f18/mysql-bootstrap/0.log"
Sep 29 14:58:10 crc kubenswrapper[4634]: I0929 14:58:10.804349 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5dcf49a2-dd23-4b67-9f54-4659168f4f18/galera/0.log"
Sep 29 14:58:11 crc kubenswrapper[4634]: I0929 14:58:11.117870 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_861151f8-60ad-449e-80fa-b1b64e5c5b3e/mysql-bootstrap/0.log"
Sep 29 14:58:11 crc kubenswrapper[4634]: I0929 14:58:11.198781 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6b03940d-5de0-4326-b16f-c436f6637a92/nova-metadata-metadata/0.log"
Sep 29 14:58:11 crc kubenswrapper[4634]: I0929 14:58:11.368115 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_861151f8-60ad-449e-80fa-b1b64e5c5b3e/mysql-bootstrap/0.log"
Sep 29 14:58:11 crc kubenswrapper[4634]: I0929 14:58:11.461991 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_861151f8-60ad-449e-80fa-b1b64e5c5b3e/galera/0.log"
Sep 29 14:58:11 crc kubenswrapper[4634]: I0929 14:58:11.475950 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_041479d7-0e40-4b0c-b301-f79c133394dc/openstackclient/0.log"
Sep 29 14:58:11 crc kubenswrapper[4634]: I0929 14:58:11.707820 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lfvq4_07a47ca0-1cd2-4e8d-92ce-37083cde3744/ovn-controller/0.log"
Sep 29 14:58:11 crc kubenswrapper[4634]: I0929 14:58:11.802543 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-7bt9h_5a821c1a-6f5a-47af-bbe6-072b2a2a8033/openstack-network-exporter/0.log"
Sep 29 14:58:12 crc kubenswrapper[4634]: I0929 14:58:12.054015 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovsdb-server-init/0.log"
Sep 29 14:58:12 crc kubenswrapper[4634]: I0929 14:58:12.114985 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:58:12 crc kubenswrapper[4634]: E0929 14:58:12.115319 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:58:12 crc kubenswrapper[4634]: I0929 14:58:12.268004 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovs-vswitchd/0.log"
Sep 29 14:58:12 crc kubenswrapper[4634]: I0929 14:58:12.298257 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovsdb-server/0.log"
Sep 29 14:58:12 crc kubenswrapper[4634]: I0929 14:58:12.327214 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovsdb-server-init/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.163328 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0288fa06-e56a-4201-a883-d1ece43562ac/openstack-network-exporter/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.207565 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-ztr49_4ffd6a52-c5fb-4796-b98b-c5ca2a238a41/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.223221 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0288fa06-e56a-4201-a883-d1ece43562ac/ovn-northd/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.449368 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d077753a-f890-4c33-9d24-d96f3b6117f3/openstack-network-exporter/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.451325 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d077753a-f890-4c33-9d24-d96f3b6117f3/ovsdbserver-nb/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.663327 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8ff4fb7c-c525-4c15-941e-4b8980a5b140/ovsdbserver-sb/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.720979 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8ff4fb7c-c525-4c15-941e-4b8980a5b140/openstack-network-exporter/0.log"
Sep 29 14:58:13 crc kubenswrapper[4634]: I0929 14:58:13.965941 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5bfb7db698-tmn8x_2b38f115-526d-4093-b79c-19e6b9258dbf/placement-api/0.log"
Sep 29 14:58:14 crc kubenswrapper[4634]: I0929 14:58:14.116301 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5bfb7db698-tmn8x_2b38f115-526d-4093-b79c-19e6b9258dbf/placement-log/0.log"
Sep 29 14:58:14 crc kubenswrapper[4634]: I0929 14:58:14.183667 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_51c0f162-132f-48c2-8e8a-65c4c4d69c69/setup-container/0.log"
Sep 29 14:58:14 crc kubenswrapper[4634]: I0929 14:58:14.354079 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_51c0f162-132f-48c2-8e8a-65c4c4d69c69/rabbitmq/0.log"
Sep 29 14:58:14 crc kubenswrapper[4634]: I0929 14:58:14.394715 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_51c0f162-132f-48c2-8e8a-65c4c4d69c69/setup-container/0.log"
Sep 29 14:58:14 crc kubenswrapper[4634]: I0929 14:58:14.517469 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_63bda06a-11bd-41fc-b988-30f1aa86b490/setup-container/0.log"
Sep 29 14:58:15 crc kubenswrapper[4634]: I0929 14:58:15.537036 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_63bda06a-11bd-41fc-b988-30f1aa86b490/setup-container/0.log"
Sep 29 14:58:15 crc kubenswrapper[4634]: I0929 14:58:15.554806 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_63bda06a-11bd-41fc-b988-30f1aa86b490/rabbitmq/0.log"
Sep 29 14:58:15 crc kubenswrapper[4634]: I0929 14:58:15.663637 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk_652c8902-8b97-4a81-8c05-10b0702d1c68/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:15 crc kubenswrapper[4634]: I0929 14:58:15.847692 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-phrjw_8043163a-b8ee-4991-9edb-8c7522be414e/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:15 crc kubenswrapper[4634]: I0929 14:58:15.966732 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj_61b3895b-871d-4318-8fb4-4426fcd6611a/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:16 crc kubenswrapper[4634]: I0929 14:58:16.186269 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-b7fm4_c35a01f0-4d25-41ad-8eff-9d65bbb2fa01/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:16 crc kubenswrapper[4634]: I0929 14:58:16.229237 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-9fkzs_ee18fe6a-41c7-471a-8f99-0cec5b0a2676/ssh-known-hosts-edpm-deployment/0.log"
Sep 29 14:58:16 crc kubenswrapper[4634]: I0929 14:58:16.542126 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-847d5655ff-zzkf2_eb9f537d-9c82-4675-aeaf-c0e4656a1330/proxy-server/0.log"
Sep 29 14:58:16 crc kubenswrapper[4634]: I0929 14:58:16.710798 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-847d5655ff-zzkf2_eb9f537d-9c82-4675-aeaf-c0e4656a1330/proxy-httpd/0.log"
Sep 29 14:58:16 crc kubenswrapper[4634]: I0929 14:58:16.757975 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-9q4mg_6ad020b7-a243-46de-8a47-2bb8af6042a0/swift-ring-rebalance/0.log"
Sep 29 14:58:16 crc kubenswrapper[4634]: I0929 14:58:16.864592 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-auditor/0.log"
Sep 29 14:58:16 crc kubenswrapper[4634]: I0929 14:58:16.942157 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-reaper/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.047502 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-replicator/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.110675 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bz22p"]
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.113016 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.156760 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bz22p"]
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.216468 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-server/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.291689 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-replicator/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.302999 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-utilities\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.303146 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-catalog-content\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.303236 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psjpc\" (UniqueName: \"kubernetes.io/projected/342ec666-82a1-4ded-b424-610ef2c77a9f-kube-api-access-psjpc\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.319892 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-auditor/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.405736 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-utilities\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.405863 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-catalog-content\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.405939 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psjpc\" (UniqueName: \"kubernetes.io/projected/342ec666-82a1-4ded-b424-610ef2c77a9f-kube-api-access-psjpc\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.406288 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-utilities\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.406384 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-catalog-content\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.433685 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psjpc\" (UniqueName: \"kubernetes.io/projected/342ec666-82a1-4ded-b424-610ef2c77a9f-kube-api-access-psjpc\") pod \"redhat-marketplace-bz22p\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") " pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.435616 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.617839 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-auditor/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.631591 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-server/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.786127 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-updater/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.885892 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-expirer/0.log"
Sep 29 14:58:17 crc kubenswrapper[4634]: I0929 14:58:17.999058 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-updater/0.log"
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.263334 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-server/0.log"
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.279007 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-replicator/0.log"
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.295171 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bz22p"]
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.309908 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/rsync/0.log"
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.650666 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/swift-recon-cron/0.log"
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.654171 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-f78vg_8e05d615-586f-430c-a8c9-f871a04f31d2/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.869282 4634 generic.go:334] "Generic (PLEG): container finished" podID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerID="e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97" exitCode=0
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.869335 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bz22p" event={"ID":"342ec666-82a1-4ded-b424-610ef2c77a9f","Type":"ContainerDied","Data":"e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97"}
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.869368 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bz22p" event={"ID":"342ec666-82a1-4ded-b424-610ef2c77a9f","Type":"ContainerStarted","Data":"4cab9c9f7db78df2774cb3f0b1392a59b797c83cd9617ed20b30b7723f150055"}
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.968761 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_1753ec8d-9af3-4930-a9d7-88b1c2f440cb/tempest-tests-tempest-tests-runner/0.log"
Sep 29 14:58:18 crc kubenswrapper[4634]: I0929 14:58:18.995348 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_8951f1a8-0969-4546-b683-b06ea036112c/test-operator-logs-container/0.log"
Sep 29 14:58:19 crc kubenswrapper[4634]: I0929 14:58:19.245288 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-thjn4_572bf9da-bb03-48be-b902-48ea1755346d/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 14:58:19 crc kubenswrapper[4634]: I0929 14:58:19.883841 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bz22p" event={"ID":"342ec666-82a1-4ded-b424-610ef2c77a9f","Type":"ContainerStarted","Data":"e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5"}
Sep 29 14:58:20 crc kubenswrapper[4634]: I0929 14:58:20.897185 4634 generic.go:334] "Generic (PLEG): container finished" podID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerID="e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5" exitCode=0
Sep 29 14:58:20 crc kubenswrapper[4634]: I0929 14:58:20.897556 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bz22p" event={"ID":"342ec666-82a1-4ded-b424-610ef2c77a9f","Type":"ContainerDied","Data":"e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5"}
Sep 29 14:58:21 crc kubenswrapper[4634]: I0929 14:58:21.918069 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bz22p" event={"ID":"342ec666-82a1-4ded-b424-610ef2c77a9f","Type":"ContainerStarted","Data":"e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d"}
Sep 29 14:58:21 crc kubenswrapper[4634]: I0929 14:58:21.955964 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bz22p" podStartSLOduration=2.373959633 podStartE2EDuration="4.955935016s" podCreationTimestamp="2025-09-29 14:58:17 +0000 UTC" firstStartedPulling="2025-09-29 14:58:18.871006305 +0000 UTC m=+4429.439734054" lastFinishedPulling="2025-09-29 14:58:21.452981688 +0000 UTC m=+4432.021709437" observedRunningTime="2025-09-29 14:58:21.943425162 +0000 UTC m=+4432.512152911" watchObservedRunningTime="2025-09-29 14:58:21.955935016 +0000 UTC m=+4432.524662765"
Sep 29 14:58:27 crc kubenswrapper[4634]: I0929 14:58:27.110266 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:58:27 crc kubenswrapper[4634]: E0929 14:58:27.111572 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:58:27 crc kubenswrapper[4634]: I0929 14:58:27.436592 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:27 crc kubenswrapper[4634]: I0929 14:58:27.437021 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:27 crc kubenswrapper[4634]: I0929 14:58:27.524324 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:28 crc kubenswrapper[4634]: I0929 14:58:28.092061 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:28 crc kubenswrapper[4634]: I0929 14:58:28.165482 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bz22p"]
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.068074 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bz22p" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="registry-server" containerID="cri-o://e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d" gracePeriod=2
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.657634 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.752995 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-utilities\") pod \"342ec666-82a1-4ded-b424-610ef2c77a9f\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") "
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.753057 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-catalog-content\") pod \"342ec666-82a1-4ded-b424-610ef2c77a9f\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") "
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.753130 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psjpc\" (UniqueName: \"kubernetes.io/projected/342ec666-82a1-4ded-b424-610ef2c77a9f-kube-api-access-psjpc\") pod \"342ec666-82a1-4ded-b424-610ef2c77a9f\" (UID: \"342ec666-82a1-4ded-b424-610ef2c77a9f\") "
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.754779 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-utilities" (OuterVolumeSpecName: "utilities") pod "342ec666-82a1-4ded-b424-610ef2c77a9f" (UID: "342ec666-82a1-4ded-b424-610ef2c77a9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.771438 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/342ec666-82a1-4ded-b424-610ef2c77a9f-kube-api-access-psjpc" (OuterVolumeSpecName: "kube-api-access-psjpc") pod "342ec666-82a1-4ded-b424-610ef2c77a9f" (UID: "342ec666-82a1-4ded-b424-610ef2c77a9f"). InnerVolumeSpecName "kube-api-access-psjpc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.781054 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "342ec666-82a1-4ded-b424-610ef2c77a9f" (UID: "342ec666-82a1-4ded-b424-610ef2c77a9f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.855741 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psjpc\" (UniqueName: \"kubernetes.io/projected/342ec666-82a1-4ded-b424-610ef2c77a9f-kube-api-access-psjpc\") on node \"crc\" DevicePath \"\""
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.855782 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 14:58:30 crc kubenswrapper[4634]: I0929 14:58:30.855795 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/342ec666-82a1-4ded-b424-610ef2c77a9f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.080930 4634 generic.go:334] "Generic (PLEG): container finished" podID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerID="e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d" exitCode=0
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.080982 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bz22p" event={"ID":"342ec666-82a1-4ded-b424-610ef2c77a9f","Type":"ContainerDied","Data":"e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d"}
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.080992 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bz22p"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.081022 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bz22p" event={"ID":"342ec666-82a1-4ded-b424-610ef2c77a9f","Type":"ContainerDied","Data":"4cab9c9f7db78df2774cb3f0b1392a59b797c83cd9617ed20b30b7723f150055"}
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.081045 4634 scope.go:117] "RemoveContainer" containerID="e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.107046 4634 scope.go:117] "RemoveContainer" containerID="e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.129136 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bz22p"]
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.132938 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bz22p"]
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.141077 4634 scope.go:117] "RemoveContainer" containerID="e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.215443 4634 scope.go:117] "RemoveContainer" containerID="e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d"
Sep 29 14:58:31 crc kubenswrapper[4634]: E0929 14:58:31.216214 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d\": container with ID starting with e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d not found: ID does not exist" containerID="e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.216281 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d"} err="failed to get container status \"e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d\": rpc error: code = NotFound desc = could not find container \"e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d\": container with ID starting with e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d not found: ID does not exist"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.216317 4634 scope.go:117] "RemoveContainer" containerID="e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5"
Sep 29 14:58:31 crc kubenswrapper[4634]: E0929 14:58:31.219320 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5\": container with ID starting with e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5 not found: ID does not exist" containerID="e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.219379 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5"} err="failed to get container status \"e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5\": rpc error: code = NotFound desc = could not find container \"e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5\": container with ID starting with e5e39284e22933badf11f4c4537013df14e951e01c96ac6fbb2878ef30f312a5 not found: ID does not exist"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.219410 4634 scope.go:117] "RemoveContainer" containerID="e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97"
Sep 29 14:58:31 crc kubenswrapper[4634]: E0929 14:58:31.221137 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97\": container with ID starting with e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97 not found: ID does not exist" containerID="e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97"
Sep 29 14:58:31 crc kubenswrapper[4634]: I0929 14:58:31.221170 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97"} err="failed to get container status \"e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97\": rpc error: code = NotFound desc = could not find container \"e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97\": container with ID starting with e0bb67acee84d00829af1969aa794da6287ec966b736568dc9a105980ab2ea97 not found: ID does not exist"
Sep 29 14:58:32 crc kubenswrapper[4634]: I0929 14:58:32.120915 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" path="/var/lib/kubelet/pods/342ec666-82a1-4ded-b424-610ef2c77a9f/volumes"
Sep 29 14:58:39 crc kubenswrapper[4634]: E0929 14:58:39.696952 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod342ec666_82a1_4ded_b424_610ef2c77a9f.slice/crio-conmon-e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d.scope\": RecentStats: unable to find data in memory cache]"
Sep 29 14:58:40 crc kubenswrapper[4634]: I0929 14:58:40.117381 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:58:40 crc kubenswrapper[4634]: E0929 14:58:40.117785 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:58:49 crc kubenswrapper[4634]: E0929 14:58:49.972689 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod342ec666_82a1_4ded_b424_610ef2c77a9f.slice/crio-conmon-e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d.scope\": RecentStats: unable to find data in memory cache]"
Sep 29 14:58:53 crc kubenswrapper[4634]: I0929 14:58:53.115003 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59"
Sep 29 14:58:53 crc kubenswrapper[4634]: E0929 14:58:53.116167 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 14:59:00 crc kubenswrapper[4634]: E0929 14:59:00.312576 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod342ec666_82a1_4ded_b424_610ef2c77a9f.slice/crio-conmon-e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d.scope\": RecentStats: unable to find data in memory cache]"
Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.164658 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-x8pwp"]
Sep 29 14:59:02 crc kubenswrapper[4634]: E0929 14:59:02.165705 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="extract-content"
Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.165724 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="extract-content"
Sep 29 14:59:02 crc kubenswrapper[4634]: E0929 14:59:02.165752 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="registry-server"
Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.165761 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="registry-server"
Sep 29 14:59:02 crc kubenswrapper[4634]: E0929 14:59:02.165792 4634 cpu_manager.go:410] "RemoveStaleState: removing container"
podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="extract-utilities" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.165800 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="extract-utilities" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.166045 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="342ec666-82a1-4ded-b424-610ef2c77a9f" containerName="registry-server" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.167622 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.200764 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x8pwp"] Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.370418 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-utilities\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.370525 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x49wt\" (UniqueName: \"kubernetes.io/projected/34f68c08-f4cb-4426-bf1a-3016aa89add6-kube-api-access-x49wt\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.370573 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-catalog-content\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.472298 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-utilities\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.472793 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x49wt\" (UniqueName: \"kubernetes.io/projected/34f68c08-f4cb-4426-bf1a-3016aa89add6-kube-api-access-x49wt\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.472833 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-catalog-content\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.472847 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-utilities\") pod \"redhat-operators-x8pwp\" (UID: 
\"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.473353 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-catalog-content\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:02 crc kubenswrapper[4634]: I0929 14:59:02.973350 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x49wt\" (UniqueName: \"kubernetes.io/projected/34f68c08-f4cb-4426-bf1a-3016aa89add6-kube-api-access-x49wt\") pod \"redhat-operators-x8pwp\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:03 crc kubenswrapper[4634]: I0929 14:59:03.096801 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:03 crc kubenswrapper[4634]: I0929 14:59:03.719966 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-x8pwp"] Sep 29 14:59:04 crc kubenswrapper[4634]: I0929 14:59:04.470559 4634 generic.go:334] "Generic (PLEG): container finished" podID="a994c0c1-48af-45d9-971d-3614484c0c43" containerID="6aec0edd364710757820b78be0499b2b4c9a98895ba9136207d08d2045d2b8fb" exitCode=0 Sep 29 14:59:04 crc kubenswrapper[4634]: I0929 14:59:04.470751 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-bz7df" event={"ID":"a994c0c1-48af-45d9-971d-3614484c0c43","Type":"ContainerDied","Data":"6aec0edd364710757820b78be0499b2b4c9a98895ba9136207d08d2045d2b8fb"} Sep 29 14:59:04 crc kubenswrapper[4634]: I0929 14:59:04.475688 4634 generic.go:334] "Generic (PLEG): container finished" podID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerID="d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d" exitCode=0 Sep 29 14:59:04 crc kubenswrapper[4634]: I0929 14:59:04.475763 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8pwp" event={"ID":"34f68c08-f4cb-4426-bf1a-3016aa89add6","Type":"ContainerDied","Data":"d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d"} Sep 29 14:59:04 crc kubenswrapper[4634]: I0929 14:59:04.475810 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8pwp" event={"ID":"34f68c08-f4cb-4426-bf1a-3016aa89add6","Type":"ContainerStarted","Data":"e9de084b8801f51063c70d0674a0973f12e29bd87e1d71a675fe8ea28de17e9e"} Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.730395 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bz7df" Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.770404 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-bz7df"] Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.779318 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-bz7df"] Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.853381 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmr2l\" (UniqueName: \"kubernetes.io/projected/a994c0c1-48af-45d9-971d-3614484c0c43-kube-api-access-kmr2l\") pod \"a994c0c1-48af-45d9-971d-3614484c0c43\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.853631 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a994c0c1-48af-45d9-971d-3614484c0c43-host\") pod \"a994c0c1-48af-45d9-971d-3614484c0c43\" (UID: \"a994c0c1-48af-45d9-971d-3614484c0c43\") " Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.853943 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a994c0c1-48af-45d9-971d-3614484c0c43-host" (OuterVolumeSpecName: "host") pod "a994c0c1-48af-45d9-971d-3614484c0c43" (UID: "a994c0c1-48af-45d9-971d-3614484c0c43"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.854448 4634 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a994c0c1-48af-45d9-971d-3614484c0c43-host\") on node \"crc\" DevicePath \"\"" Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.860563 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a994c0c1-48af-45d9-971d-3614484c0c43-kube-api-access-kmr2l" (OuterVolumeSpecName: "kube-api-access-kmr2l") pod "a994c0c1-48af-45d9-971d-3614484c0c43" (UID: "a994c0c1-48af-45d9-971d-3614484c0c43"). InnerVolumeSpecName "kube-api-access-kmr2l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:59:05 crc kubenswrapper[4634]: I0929 14:59:05.956319 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmr2l\" (UniqueName: \"kubernetes.io/projected/a994c0c1-48af-45d9-971d-3614484c0c43-kube-api-access-kmr2l\") on node \"crc\" DevicePath \"\"" Sep 29 14:59:06 crc kubenswrapper[4634]: I0929 14:59:06.114738 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 14:59:06 crc kubenswrapper[4634]: E0929 14:59:06.115340 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:59:06 crc kubenswrapper[4634]: I0929 14:59:06.125201 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a994c0c1-48af-45d9-971d-3614484c0c43" path="/var/lib/kubelet/pods/a994c0c1-48af-45d9-971d-3614484c0c43/volumes" Sep 29 14:59:06 crc kubenswrapper[4634]: I0929 14:59:06.502023 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bz7df" Sep 29 14:59:06 crc kubenswrapper[4634]: I0929 14:59:06.502062 4634 scope.go:117] "RemoveContainer" containerID="6aec0edd364710757820b78be0499b2b4c9a98895ba9136207d08d2045d2b8fb" Sep 29 14:59:06 crc kubenswrapper[4634]: I0929 14:59:06.505404 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8pwp" event={"ID":"34f68c08-f4cb-4426-bf1a-3016aa89add6","Type":"ContainerStarted","Data":"1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a"} Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.085353 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-bnqvw"] Sep 29 14:59:07 crc kubenswrapper[4634]: E0929 14:59:07.086771 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a994c0c1-48af-45d9-971d-3614484c0c43" containerName="container-00" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.086880 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a994c0c1-48af-45d9-971d-3614484c0c43" containerName="container-00" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.087276 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a994c0c1-48af-45d9-971d-3614484c0c43" containerName="container-00" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.088247 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.186620 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-host\") pod \"crc-debug-bnqvw\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.187007 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdgv7\" (UniqueName: \"kubernetes.io/projected/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-kube-api-access-tdgv7\") pod \"crc-debug-bnqvw\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.287907 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-host\") pod \"crc-debug-bnqvw\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.287982 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdgv7\" (UniqueName: \"kubernetes.io/projected/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-kube-api-access-tdgv7\") pod \"crc-debug-bnqvw\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.288305 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-host\") pod \"crc-debug-bnqvw\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.310114 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdgv7\" (UniqueName: \"kubernetes.io/projected/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-kube-api-access-tdgv7\") pod \"crc-debug-bnqvw\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.407143 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:07 crc kubenswrapper[4634]: W0929 14:59:07.436682 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8598a2ee_e524_4dc3_ace2_3a68cfc7c57c.slice/crio-0f572c5a7f75f1cd5f1743a9a03904a3d70e32d09dacdc286ba6bcf4ee052efc WatchSource:0}: Error finding container 0f572c5a7f75f1cd5f1743a9a03904a3d70e32d09dacdc286ba6bcf4ee052efc: Status 404 returned error can't find the container with id 0f572c5a7f75f1cd5f1743a9a03904a3d70e32d09dacdc286ba6bcf4ee052efc Sep 29 14:59:07 crc kubenswrapper[4634]: I0929 14:59:07.520114 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" event={"ID":"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c","Type":"ContainerStarted","Data":"0f572c5a7f75f1cd5f1743a9a03904a3d70e32d09dacdc286ba6bcf4ee052efc"} Sep 29 14:59:08 crc kubenswrapper[4634]: I0929 14:59:08.537351 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" event={"ID":"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c","Type":"ContainerStarted","Data":"aeea3934f3bc6c933e75f8987521e98de9912bb74a6f2860222ec76d8921f6ce"} Sep 29 14:59:08 crc kubenswrapper[4634]: I0929 14:59:08.572468 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" podStartSLOduration=1.5724398160000002 podStartE2EDuration="1.572439816s" podCreationTimestamp="2025-09-29 14:59:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 14:59:08.561855015 +0000 UTC m=+4479.130582774" watchObservedRunningTime="2025-09-29 14:59:08.572439816 +0000 UTC m=+4479.141167555" Sep 29 14:59:10 crc kubenswrapper[4634]: I0929 14:59:10.574072 4634 generic.go:334] "Generic (PLEG): container finished" podID="8598a2ee-e524-4dc3-ace2-3a68cfc7c57c" containerID="aeea3934f3bc6c933e75f8987521e98de9912bb74a6f2860222ec76d8921f6ce" exitCode=0 Sep 29 14:59:10 crc kubenswrapper[4634]: I0929 14:59:10.574441 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" event={"ID":"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c","Type":"ContainerDied","Data":"aeea3934f3bc6c933e75f8987521e98de9912bb74a6f2860222ec76d8921f6ce"} Sep 29 14:59:10 crc kubenswrapper[4634]: E0929 14:59:10.721033 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod342ec666_82a1_4ded_b424_610ef2c77a9f.slice/crio-conmon-e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d.scope\": RecentStats: unable to find data in memory cache]" Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.589530 4634 generic.go:334] "Generic (PLEG): container finished" podID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerID="1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a" exitCode=0 Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.589588 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8pwp" event={"ID":"34f68c08-f4cb-4426-bf1a-3016aa89add6","Type":"ContainerDied","Data":"1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a"} Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.706959 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.893349 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdgv7\" (UniqueName: \"kubernetes.io/projected/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-kube-api-access-tdgv7\") pod \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.893450 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-host\") pod \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\" (UID: \"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c\") " Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.893680 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-host" (OuterVolumeSpecName: "host") pod "8598a2ee-e524-4dc3-ace2-3a68cfc7c57c" (UID: "8598a2ee-e524-4dc3-ace2-3a68cfc7c57c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.894168 4634 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-host\") on node \"crc\" DevicePath \"\"" Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.903506 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-kube-api-access-tdgv7" (OuterVolumeSpecName: "kube-api-access-tdgv7") pod "8598a2ee-e524-4dc3-ace2-3a68cfc7c57c" (UID: "8598a2ee-e524-4dc3-ace2-3a68cfc7c57c"). InnerVolumeSpecName "kube-api-access-tdgv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:59:11 crc kubenswrapper[4634]: I0929 14:59:11.995890 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdgv7\" (UniqueName: \"kubernetes.io/projected/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c-kube-api-access-tdgv7\") on node \"crc\" DevicePath \"\"" Sep 29 14:59:12 crc kubenswrapper[4634]: I0929 14:59:12.602050 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" event={"ID":"8598a2ee-e524-4dc3-ace2-3a68cfc7c57c","Type":"ContainerDied","Data":"0f572c5a7f75f1cd5f1743a9a03904a3d70e32d09dacdc286ba6bcf4ee052efc"} Sep 29 14:59:12 crc kubenswrapper[4634]: I0929 14:59:12.602140 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f572c5a7f75f1cd5f1743a9a03904a3d70e32d09dacdc286ba6bcf4ee052efc" Sep 29 14:59:12 crc kubenswrapper[4634]: I0929 14:59:12.602232 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-bnqvw" Sep 29 14:59:12 crc kubenswrapper[4634]: I0929 14:59:12.611393 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8pwp" event={"ID":"34f68c08-f4cb-4426-bf1a-3016aa89add6","Type":"ContainerStarted","Data":"8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b"} Sep 29 14:59:12 crc kubenswrapper[4634]: I0929 14:59:12.645785 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-x8pwp" podStartSLOduration=3.070522927 podStartE2EDuration="10.645758499s" podCreationTimestamp="2025-09-29 14:59:02 +0000 UTC" firstStartedPulling="2025-09-29 14:59:04.479292057 +0000 UTC m=+4475.048019806" lastFinishedPulling="2025-09-29 14:59:12.054527629 +0000 UTC m=+4482.623255378" observedRunningTime="2025-09-29 14:59:12.639106567 +0000 UTC m=+4483.207834316" watchObservedRunningTime="2025-09-29 14:59:12.645758499 +0000 UTC m=+4483.214486248" Sep 29 14:59:13 crc kubenswrapper[4634]: I0929 14:59:13.096950 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:13 crc kubenswrapper[4634]: I0929 14:59:13.097745 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 14:59:14 crc kubenswrapper[4634]: I0929 14:59:14.148785 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x8pwp" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" probeResult="failure" output=< Sep 29 14:59:14 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:59:14 crc kubenswrapper[4634]: > Sep 29 14:59:16 crc kubenswrapper[4634]: I0929 14:59:16.944047 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-bnqvw"] Sep 29 14:59:16 crc kubenswrapper[4634]: I0929 14:59:16.953648 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-bnqvw"] Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.111498 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 14:59:18 crc kubenswrapper[4634]: E0929 14:59:18.112691 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.125388 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8598a2ee-e524-4dc3-ace2-3a68cfc7c57c" path="/var/lib/kubelet/pods/8598a2ee-e524-4dc3-ace2-3a68cfc7c57c/volumes" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.318862 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-vj74p"] Sep 29 14:59:18 crc kubenswrapper[4634]: E0929 14:59:18.319388 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8598a2ee-e524-4dc3-ace2-3a68cfc7c57c" containerName="container-00" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.319407 4634 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="8598a2ee-e524-4dc3-ace2-3a68cfc7c57c" containerName="container-00" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.319689 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="8598a2ee-e524-4dc3-ace2-3a68cfc7c57c" containerName="container-00" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.320517 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.346983 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trqjm\" (UniqueName: \"kubernetes.io/projected/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-kube-api-access-trqjm\") pod \"crc-debug-vj74p\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.347074 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-host\") pod \"crc-debug-vj74p\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.448684 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-host\") pod \"crc-debug-vj74p\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.448828 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trqjm\" (UniqueName: \"kubernetes.io/projected/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-kube-api-access-trqjm\") pod \"crc-debug-vj74p\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.448987 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-host\") pod \"crc-debug-vj74p\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.475404 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trqjm\" (UniqueName: \"kubernetes.io/projected/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-kube-api-access-trqjm\") pod \"crc-debug-vj74p\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:18 crc kubenswrapper[4634]: I0929 14:59:18.645637 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:19 crc kubenswrapper[4634]: I0929 14:59:19.687520 4634 generic.go:334] "Generic (PLEG): container finished" podID="b3ea7e21-dd22-465e-8012-a2f908fd0ca0" containerID="1b3e12ab00b740b21b61a6ae6b82a0ab82f97f62f5e73ea5b9ca910a0691f025" exitCode=0 Sep 29 14:59:19 crc kubenswrapper[4634]: I0929 14:59:19.687631 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-vj74p" event={"ID":"b3ea7e21-dd22-465e-8012-a2f908fd0ca0","Type":"ContainerDied","Data":"1b3e12ab00b740b21b61a6ae6b82a0ab82f97f62f5e73ea5b9ca910a0691f025"} Sep 29 14:59:19 crc kubenswrapper[4634]: I0929 14:59:19.687880 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mfrz5/crc-debug-vj74p" event={"ID":"b3ea7e21-dd22-465e-8012-a2f908fd0ca0","Type":"ContainerStarted","Data":"88604c43e26acf836960b5fe880f58748fc3b60fb85f9a77ca3143647e97fce3"} Sep 29 14:59:19 crc kubenswrapper[4634]: I0929 14:59:19.750173 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-vj74p"] Sep 29 14:59:19 crc kubenswrapper[4634]: I0929 14:59:19.762280 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mfrz5/crc-debug-vj74p"] Sep 29 14:59:20 crc kubenswrapper[4634]: I0929 14:59:20.826856 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:20 crc kubenswrapper[4634]: I0929 14:59:20.922055 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trqjm\" (UniqueName: \"kubernetes.io/projected/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-kube-api-access-trqjm\") pod \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " Sep 29 14:59:20 crc kubenswrapper[4634]: I0929 14:59:20.922457 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-host\") pod \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\" (UID: \"b3ea7e21-dd22-465e-8012-a2f908fd0ca0\") " Sep 29 14:59:20 crc kubenswrapper[4634]: I0929 14:59:20.922509 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-host" (OuterVolumeSpecName: "host") pod "b3ea7e21-dd22-465e-8012-a2f908fd0ca0" (UID: "b3ea7e21-dd22-465e-8012-a2f908fd0ca0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 14:59:20 crc kubenswrapper[4634]: I0929 14:59:20.923353 4634 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-host\") on node \"crc\" DevicePath \"\"" Sep 29 14:59:20 crc kubenswrapper[4634]: I0929 14:59:20.934473 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-kube-api-access-trqjm" (OuterVolumeSpecName: "kube-api-access-trqjm") pod "b3ea7e21-dd22-465e-8012-a2f908fd0ca0" (UID: "b3ea7e21-dd22-465e-8012-a2f908fd0ca0"). InnerVolumeSpecName "kube-api-access-trqjm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 14:59:21 crc kubenswrapper[4634]: I0929 14:59:21.025036 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trqjm\" (UniqueName: \"kubernetes.io/projected/b3ea7e21-dd22-465e-8012-a2f908fd0ca0-kube-api-access-trqjm\") on node \"crc\" DevicePath \"\"" Sep 29 14:59:21 crc kubenswrapper[4634]: E0929 14:59:21.041881 4634 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod342ec666_82a1_4ded_b424_610ef2c77a9f.slice/crio-conmon-e28b626a0ff7bd690ddf0ccce2757e1a49f4aa06de66ee75e44d6e1c5a08993d.scope\": RecentStats: unable to find data in memory cache]" Sep 29 14:59:21 crc kubenswrapper[4634]: I0929 14:59:21.718003 4634 scope.go:117] "RemoveContainer" containerID="1b3e12ab00b740b21b61a6ae6b82a0ab82f97f62f5e73ea5b9ca910a0691f025" Sep 29 14:59:21 crc kubenswrapper[4634]: I0929 14:59:21.718129 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/crc-debug-vj74p" Sep 29 14:59:22 crc kubenswrapper[4634]: I0929 14:59:22.122471 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3ea7e21-dd22-465e-8012-a2f908fd0ca0" path="/var/lib/kubelet/pods/b3ea7e21-dd22-465e-8012-a2f908fd0ca0/volumes" Sep 29 14:59:22 crc kubenswrapper[4634]: I0929 14:59:22.814255 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/util/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.059571 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/pull/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.071245 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/util/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.072952 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/pull/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.382590 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/extract/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.509164 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/util/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.516641 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/pull/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.705612 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6495d75b5-2nbgf_7fb4797f-f58b-425a-a987-4559c9d5d481/kube-rbac-proxy/0.log" Sep 29 14:59:23 crc kubenswrapper[4634]: I0929 14:59:23.912055 4634 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748c574d75-h2wqv_d82e90ad-ac20-415a-9b7e-168e6472f2a8/kube-rbac-proxy/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.151619 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x8pwp" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" probeResult="failure" output=< Sep 29 14:59:24 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:59:24 crc kubenswrapper[4634]: > Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.213390 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748c574d75-h2wqv_d82e90ad-ac20-415a-9b7e-168e6472f2a8/manager/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.231772 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6495d75b5-2nbgf_7fb4797f-f58b-425a-a987-4559c9d5d481/manager/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.339479 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d74f4d695-w2nj4_fc9290c5-62eb-4b93-8b0f-032c2474510f/kube-rbac-proxy/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.544035 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d74f4d695-w2nj4_fc9290c5-62eb-4b93-8b0f-032c2474510f/manager/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.613559 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67b5d44b7f-fzzjz_12b1701c-523e-428c-817b-f0ae4914b9fb/kube-rbac-proxy/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.680101 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67b5d44b7f-fzzjz_12b1701c-523e-428c-817b-f0ae4914b9fb/manager/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.885909 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-8ff95898-b7s9w_8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15/manager/0.log" Sep 29 14:59:24 crc kubenswrapper[4634]: I0929 14:59:24.962623 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-8ff95898-b7s9w_8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15/kube-rbac-proxy/0.log" Sep 29 14:59:25 crc kubenswrapper[4634]: I0929 14:59:25.266456 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-695847bc78-g6ncd_ba50e2d0-3018-4591-81fd-9e31c5d39951/manager/0.log" Sep 29 14:59:25 crc kubenswrapper[4634]: I0929 14:59:25.301962 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-695847bc78-g6ncd_ba50e2d0-3018-4591-81fd-9e31c5d39951/kube-rbac-proxy/0.log" Sep 29 14:59:25 crc kubenswrapper[4634]: I0929 14:59:25.360669 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-858cd69f49-7v24f_e220a6dd-ab23-4eeb-9cb7-8496c72cc19f/kube-rbac-proxy/0.log" Sep 29 14:59:25 crc kubenswrapper[4634]: I0929 14:59:25.616355 4634 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9fc8d5567-c68h7_c00a2f33-36be-4039-a5ae-73df39f84d1d/kube-rbac-proxy/0.log" Sep 29 14:59:25 crc kubenswrapper[4634]: I0929 14:59:25.694511 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-858cd69f49-7v24f_e220a6dd-ab23-4eeb-9cb7-8496c72cc19f/manager/0.log" Sep 29 14:59:25 crc kubenswrapper[4634]: I0929 14:59:25.737721 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9fc8d5567-c68h7_c00a2f33-36be-4039-a5ae-73df39f84d1d/manager/0.log" Sep 29 14:59:26 crc kubenswrapper[4634]: I0929 14:59:26.021938 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7bf498966c-9l547_3fce3aee-b45a-4d80-a2e5-529632ed8a2d/kube-rbac-proxy/0.log" Sep 29 14:59:26 crc kubenswrapper[4634]: I0929 14:59:26.115757 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7bf498966c-9l547_3fce3aee-b45a-4d80-a2e5-529632ed8a2d/manager/0.log" Sep 29 14:59:26 crc kubenswrapper[4634]: I0929 14:59:26.490377 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-56cf9c6b99-bd4mn_ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df/kube-rbac-proxy/0.log" Sep 29 14:59:26 crc kubenswrapper[4634]: I0929 14:59:26.585003 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-56cf9c6b99-bd4mn_ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df/manager/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.117535 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-687b9cf756-hd2mv_0b0b3b6f-0579-4a42-bad2-ecbda8906426/kube-rbac-proxy/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.134733 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-687b9cf756-hd2mv_0b0b3b6f-0579-4a42-bad2-ecbda8906426/manager/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.419201 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54d766c9f9-k2dqf_be9fbcb2-15d0-4fc2-b745-41178d406fca/kube-rbac-proxy/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.445673 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-9z55w_11b77d0f-14f2-47d2-839a-6e06505787a2/kube-rbac-proxy/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.548067 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54d766c9f9-k2dqf_be9fbcb2-15d0-4fc2-b745-41178d406fca/manager/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.550394 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-9z55w_11b77d0f-14f2-47d2-839a-6e06505787a2/manager/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.764436 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-7c4mq_65cef236-09ce-4623-9cd8-9d4c0e1f8346/kube-rbac-proxy/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.843452 4634 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-7c4mq_65cef236-09ce-4623-9cd8-9d4c0e1f8346/manager/0.log" Sep 29 14:59:27 crc kubenswrapper[4634]: I0929 14:59:27.865215 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-rq5pg_e6c834dc-3418-4d52-ade3-02c1043d6360/kube-rbac-proxy/0.log" Sep 29 14:59:28 crc kubenswrapper[4634]: I0929 14:59:28.087051 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667746d855-fd8px_63fb32c1-31c4-4ab0-b10e-c467e2c74410/kube-rbac-proxy/0.log" Sep 29 14:59:28 crc kubenswrapper[4634]: I0929 14:59:28.092727 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-rq5pg_e6c834dc-3418-4d52-ade3-02c1043d6360/manager/0.log" Sep 29 14:59:28 crc kubenswrapper[4634]: I0929 14:59:28.407881 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fc7b59957-prtng_aea3000a-d973-4f2b-a521-dd3313901830/kube-rbac-proxy/0.log" Sep 29 14:59:28 crc kubenswrapper[4634]: I0929 14:59:28.794974 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fc7b59957-prtng_aea3000a-d973-4f2b-a521-dd3313901830/operator/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.095198 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-nzrgv_90d0c015-fc7c-4d00-b1a2-83a4e0d68ada/registry-server/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.244348 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5f95c46c78-2vb6t_0808341c-4037-4360-bc34-dce11a7e8088/kube-rbac-proxy/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.290511 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5f95c46c78-2vb6t_0808341c-4037-4360-bc34-dce11a7e8088/manager/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.537689 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-774b97b48-66mbm_e777128b-ae24-469f-81bb-adf78608f20e/kube-rbac-proxy/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.551301 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667746d855-fd8px_63fb32c1-31c4-4ab0-b10e-c467e2c74410/manager/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.593901 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-774b97b48-66mbm_e777128b-ae24-469f-81bb-adf78608f20e/manager/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.790510 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-6gkh8_b108e534-0a60-4d24-a6b3-9b967045469a/kube-rbac-proxy/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.838749 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-cvk6x_c5c15e4b-b806-4d39-915f-c6e60e6d72ea/operator/0.log" Sep 29 14:59:29 crc kubenswrapper[4634]: I0929 14:59:29.954998 
4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-6gkh8_b108e534-0a60-4d24-a6b3-9b967045469a/manager/0.log" Sep 29 14:59:30 crc kubenswrapper[4634]: I0929 14:59:30.069534 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5bf96cfbc4-2dp75_97de340e-634f-47e2-8a37-800f2261e43b/kube-rbac-proxy/0.log" Sep 29 14:59:30 crc kubenswrapper[4634]: I0929 14:59:30.125885 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 14:59:30 crc kubenswrapper[4634]: E0929 14:59:30.126288 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:59:30 crc kubenswrapper[4634]: I0929 14:59:30.201536 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5bf96cfbc4-2dp75_97de340e-634f-47e2-8a37-800f2261e43b/manager/0.log" Sep 29 14:59:30 crc kubenswrapper[4634]: I0929 14:59:30.272041 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-knb5c_6cb280b6-d86c-42cb-8887-819b38c304b8/kube-rbac-proxy/0.log" Sep 29 14:59:30 crc kubenswrapper[4634]: I0929 14:59:30.272779 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-knb5c_6cb280b6-d86c-42cb-8887-819b38c304b8/manager/0.log" Sep 29 14:59:30 crc kubenswrapper[4634]: I0929 14:59:30.472116 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-7w4vx_45b61a8e-44b5-4cca-85b6-344738b51f52/kube-rbac-proxy/0.log" Sep 29 14:59:30 crc kubenswrapper[4634]: I0929 14:59:30.505346 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-7w4vx_45b61a8e-44b5-4cca-85b6-344738b51f52/manager/0.log" Sep 29 14:59:34 crc kubenswrapper[4634]: I0929 14:59:34.156252 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x8pwp" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" probeResult="failure" output=< Sep 29 14:59:34 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:59:34 crc kubenswrapper[4634]: > Sep 29 14:59:43 crc kubenswrapper[4634]: I0929 14:59:43.111444 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 14:59:43 crc kubenswrapper[4634]: E0929 14:59:43.112388 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 14:59:44 crc kubenswrapper[4634]: I0929 14:59:44.151773 4634 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x8pwp" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" probeResult="failure" output=< Sep 29 14:59:44 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:59:44 crc kubenswrapper[4634]: > Sep 29 14:59:50 crc kubenswrapper[4634]: I0929 14:59:50.065839 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-jq747_0c78cbc4-e705-490d-b453-9b1ec8a4ca07/control-plane-machine-set-operator/0.log" Sep 29 14:59:50 crc kubenswrapper[4634]: I0929 14:59:50.288257 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-27cm7_b02e5190-b670-4ec4-824f-a4f18cf79e33/kube-rbac-proxy/0.log" Sep 29 14:59:50 crc kubenswrapper[4634]: I0929 14:59:50.364074 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-27cm7_b02e5190-b670-4ec4-824f-a4f18cf79e33/machine-api-operator/0.log" Sep 29 14:59:54 crc kubenswrapper[4634]: I0929 14:59:54.155279 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x8pwp" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" probeResult="failure" output=< Sep 29 14:59:54 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 14:59:54 crc kubenswrapper[4634]: > Sep 29 14:59:56 crc kubenswrapper[4634]: I0929 14:59:56.110623 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 14:59:56 crc kubenswrapper[4634]: E0929 14:59:56.111540 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.184198 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb"] Sep 29 15:00:00 crc kubenswrapper[4634]: E0929 15:00:00.186824 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3ea7e21-dd22-465e-8012-a2f908fd0ca0" containerName="container-00" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.186857 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3ea7e21-dd22-465e-8012-a2f908fd0ca0" containerName="container-00" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.187080 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3ea7e21-dd22-465e-8012-a2f908fd0ca0" containerName="container-00" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.187929 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.193772 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.193995 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.209259 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb"] Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.346860 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vxjk\" (UniqueName: \"kubernetes.io/projected/4c03fa6f-627d-40d3-b9ce-5186793d5079-kube-api-access-5vxjk\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.346973 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c03fa6f-627d-40d3-b9ce-5186793d5079-secret-volume\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.347175 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c03fa6f-627d-40d3-b9ce-5186793d5079-config-volume\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.448946 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c03fa6f-627d-40d3-b9ce-5186793d5079-config-volume\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.449065 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vxjk\" (UniqueName: \"kubernetes.io/projected/4c03fa6f-627d-40d3-b9ce-5186793d5079-kube-api-access-5vxjk\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.449137 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c03fa6f-627d-40d3-b9ce-5186793d5079-secret-volume\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.450652 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c03fa6f-627d-40d3-b9ce-5186793d5079-config-volume\") pod 
\"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.458842 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c03fa6f-627d-40d3-b9ce-5186793d5079-secret-volume\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.486034 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vxjk\" (UniqueName: \"kubernetes.io/projected/4c03fa6f-627d-40d3-b9ce-5186793d5079-kube-api-access-5vxjk\") pod \"collect-profiles-29319300-mvscb\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:00 crc kubenswrapper[4634]: I0929 15:00:00.525017 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:01 crc kubenswrapper[4634]: I0929 15:00:01.179360 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb"] Sep 29 15:00:02 crc kubenswrapper[4634]: I0929 15:00:02.161563 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" event={"ID":"4c03fa6f-627d-40d3-b9ce-5186793d5079","Type":"ContainerStarted","Data":"6ea61008e8447cb8c62a97cf6e50ba5547b7ce11b23246620181b2991055f24b"} Sep 29 15:00:02 crc kubenswrapper[4634]: I0929 15:00:02.162052 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" event={"ID":"4c03fa6f-627d-40d3-b9ce-5186793d5079","Type":"ContainerStarted","Data":"b00d9cdb5d9d9ba535001af5400cb6b2cca8199784be6ff23afdf3f1bc418a75"} Sep 29 15:00:03 crc kubenswrapper[4634]: I0929 15:00:03.172950 4634 generic.go:334] "Generic (PLEG): container finished" podID="4c03fa6f-627d-40d3-b9ce-5186793d5079" containerID="6ea61008e8447cb8c62a97cf6e50ba5547b7ce11b23246620181b2991055f24b" exitCode=0 Sep 29 15:00:03 crc kubenswrapper[4634]: I0929 15:00:03.173697 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" event={"ID":"4c03fa6f-627d-40d3-b9ce-5186793d5079","Type":"ContainerDied","Data":"6ea61008e8447cb8c62a97cf6e50ba5547b7ce11b23246620181b2991055f24b"} Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.166022 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-x8pwp" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" probeResult="failure" output=< Sep 29 15:00:04 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s Sep 29 15:00:04 crc kubenswrapper[4634]: > Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.671300 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.857423 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vxjk\" (UniqueName: \"kubernetes.io/projected/4c03fa6f-627d-40d3-b9ce-5186793d5079-kube-api-access-5vxjk\") pod \"4c03fa6f-627d-40d3-b9ce-5186793d5079\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.857490 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c03fa6f-627d-40d3-b9ce-5186793d5079-secret-volume\") pod \"4c03fa6f-627d-40d3-b9ce-5186793d5079\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.857554 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c03fa6f-627d-40d3-b9ce-5186793d5079-config-volume\") pod \"4c03fa6f-627d-40d3-b9ce-5186793d5079\" (UID: \"4c03fa6f-627d-40d3-b9ce-5186793d5079\") " Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.858871 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c03fa6f-627d-40d3-b9ce-5186793d5079-config-volume" (OuterVolumeSpecName: "config-volume") pod "4c03fa6f-627d-40d3-b9ce-5186793d5079" (UID: "4c03fa6f-627d-40d3-b9ce-5186793d5079"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.865315 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c03fa6f-627d-40d3-b9ce-5186793d5079-kube-api-access-5vxjk" (OuterVolumeSpecName: "kube-api-access-5vxjk") pod "4c03fa6f-627d-40d3-b9ce-5186793d5079" (UID: "4c03fa6f-627d-40d3-b9ce-5186793d5079"). InnerVolumeSpecName "kube-api-access-5vxjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.884814 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c03fa6f-627d-40d3-b9ce-5186793d5079-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4c03fa6f-627d-40d3-b9ce-5186793d5079" (UID: "4c03fa6f-627d-40d3-b9ce-5186793d5079"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.960539 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vxjk\" (UniqueName: \"kubernetes.io/projected/4c03fa6f-627d-40d3-b9ce-5186793d5079-kube-api-access-5vxjk\") on node \"crc\" DevicePath \"\"" Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.960586 4634 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4c03fa6f-627d-40d3-b9ce-5186793d5079-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 15:00:04 crc kubenswrapper[4634]: I0929 15:00:04.960601 4634 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4c03fa6f-627d-40d3-b9ce-5186793d5079-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 15:00:05 crc kubenswrapper[4634]: I0929 15:00:05.201077 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" event={"ID":"4c03fa6f-627d-40d3-b9ce-5186793d5079","Type":"ContainerDied","Data":"b00d9cdb5d9d9ba535001af5400cb6b2cca8199784be6ff23afdf3f1bc418a75"} Sep 29 15:00:05 crc kubenswrapper[4634]: I0929 15:00:05.201497 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b00d9cdb5d9d9ba535001af5400cb6b2cca8199784be6ff23afdf3f1bc418a75" Sep 29 15:00:05 crc kubenswrapper[4634]: I0929 15:00:05.201375 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319300-mvscb" Sep 29 15:00:05 crc kubenswrapper[4634]: I0929 15:00:05.290397 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"] Sep 29 15:00:05 crc kubenswrapper[4634]: I0929 15:00:05.301078 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319255-xvgqk"] Sep 29 15:00:06 crc kubenswrapper[4634]: I0929 15:00:06.178712 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06dcdd4d-8ef9-43c8-924e-87fe5c67d329" path="/var/lib/kubelet/pods/06dcdd4d-8ef9-43c8-924e-87fe5c67d329/volumes" Sep 29 15:00:07 crc kubenswrapper[4634]: I0929 15:00:07.998928 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-2vbph_d2d8a3b4-5469-4e43-853a-68ea314698d5/cert-manager-controller/0.log" Sep 29 15:00:08 crc kubenswrapper[4634]: I0929 15:00:08.247736 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-fssvx_f7a64d90-3df0-4013-9334-10cb44b056d0/cert-manager-cainjector/0.log" Sep 29 15:00:08 crc kubenswrapper[4634]: I0929 15:00:08.387393 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-whcmj_952ce650-52ed-4dcb-88bb-d9f9ce5a69ed/cert-manager-webhook/0.log" Sep 29 15:00:11 crc kubenswrapper[4634]: I0929 15:00:11.110051 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 15:00:11 crc kubenswrapper[4634]: E0929 15:00:11.111047 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:00:13 crc kubenswrapper[4634]: I0929 15:00:13.164025 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 15:00:13 crc kubenswrapper[4634]: I0929 15:00:13.233286 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 15:00:13 crc kubenswrapper[4634]: I0929 15:00:13.420587 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x8pwp"] Sep 29 15:00:14 crc kubenswrapper[4634]: I0929 15:00:14.286705 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-x8pwp" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" containerID="cri-o://8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b" gracePeriod=2 Sep 29 15:00:14 crc kubenswrapper[4634]: I0929 15:00:14.797383 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 15:00:14 crc kubenswrapper[4634]: I0929 15:00:14.894827 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-utilities\") pod \"34f68c08-f4cb-4426-bf1a-3016aa89add6\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " Sep 29 15:00:14 crc kubenswrapper[4634]: I0929 15:00:14.894971 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x49wt\" (UniqueName: \"kubernetes.io/projected/34f68c08-f4cb-4426-bf1a-3016aa89add6-kube-api-access-x49wt\") pod \"34f68c08-f4cb-4426-bf1a-3016aa89add6\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " Sep 29 15:00:14 crc kubenswrapper[4634]: I0929 15:00:14.894994 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-catalog-content\") pod \"34f68c08-f4cb-4426-bf1a-3016aa89add6\" (UID: \"34f68c08-f4cb-4426-bf1a-3016aa89add6\") " Sep 29 15:00:14 crc kubenswrapper[4634]: I0929 15:00:14.897344 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-utilities" (OuterVolumeSpecName: "utilities") pod "34f68c08-f4cb-4426-bf1a-3016aa89add6" (UID: "34f68c08-f4cb-4426-bf1a-3016aa89add6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:00:14 crc kubenswrapper[4634]: I0929 15:00:14.924044 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34f68c08-f4cb-4426-bf1a-3016aa89add6-kube-api-access-x49wt" (OuterVolumeSpecName: "kube-api-access-x49wt") pod "34f68c08-f4cb-4426-bf1a-3016aa89add6" (UID: "34f68c08-f4cb-4426-bf1a-3016aa89add6"). InnerVolumeSpecName "kube-api-access-x49wt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.000775 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.000808 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x49wt\" (UniqueName: \"kubernetes.io/projected/34f68c08-f4cb-4426-bf1a-3016aa89add6-kube-api-access-x49wt\") on node \"crc\" DevicePath \"\"" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.020246 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "34f68c08-f4cb-4426-bf1a-3016aa89add6" (UID: "34f68c08-f4cb-4426-bf1a-3016aa89add6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.102912 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34f68c08-f4cb-4426-bf1a-3016aa89add6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.297790 4634 generic.go:334] "Generic (PLEG): container finished" podID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerID="8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b" exitCode=0 Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.297842 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8pwp" event={"ID":"34f68c08-f4cb-4426-bf1a-3016aa89add6","Type":"ContainerDied","Data":"8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b"} Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.297863 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-x8pwp" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.297885 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-x8pwp" event={"ID":"34f68c08-f4cb-4426-bf1a-3016aa89add6","Type":"ContainerDied","Data":"e9de084b8801f51063c70d0674a0973f12e29bd87e1d71a675fe8ea28de17e9e"} Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.297909 4634 scope.go:117] "RemoveContainer" containerID="8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.343583 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-x8pwp"] Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.345922 4634 scope.go:117] "RemoveContainer" containerID="1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.351355 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-x8pwp"] Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.372377 4634 scope.go:117] "RemoveContainer" containerID="d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.438846 4634 scope.go:117] "RemoveContainer" containerID="8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b" Sep 29 15:00:15 crc kubenswrapper[4634]: E0929 15:00:15.440408 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b\": container with ID starting with 8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b not found: ID does not exist" containerID="8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.440481 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b"} err="failed to get container status \"8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b\": rpc error: code = NotFound desc = could not find container \"8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b\": container with ID starting with 8b8e32d2d77958ec31f59b03ef69e1f19b79bf609c7015b0ab5f972bcdb8f18b not found: ID does not exist" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.440518 4634 scope.go:117] "RemoveContainer" containerID="1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a" Sep 29 15:00:15 crc kubenswrapper[4634]: E0929 15:00:15.441937 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a\": container with ID starting with 1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a not found: ID does not exist" containerID="1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.441963 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a"} err="failed to get container status \"1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a\": rpc error: code = NotFound desc = could not find container 
\"1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a\": container with ID starting with 1b94bfc30726d7978a1071e8aeee155b5ee2a0f0546746e412e68822d8895c3a not found: ID does not exist" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.441980 4634 scope.go:117] "RemoveContainer" containerID="d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d" Sep 29 15:00:15 crc kubenswrapper[4634]: E0929 15:00:15.442236 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d\": container with ID starting with d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d not found: ID does not exist" containerID="d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d" Sep 29 15:00:15 crc kubenswrapper[4634]: I0929 15:00:15.442266 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d"} err="failed to get container status \"d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d\": rpc error: code = NotFound desc = could not find container \"d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d\": container with ID starting with d461d357017d83b98e239e0ce4944989a6d59b1d6937cd9cf1f5d1cceacc7e7d not found: ID does not exist" Sep 29 15:00:16 crc kubenswrapper[4634]: I0929 15:00:16.124996 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" path="/var/lib/kubelet/pods/34f68c08-f4cb-4426-bf1a-3016aa89add6/volumes" Sep 29 15:00:26 crc kubenswrapper[4634]: I0929 15:00:26.124142 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 15:00:26 crc kubenswrapper[4634]: E0929 15:00:26.125780 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:00:26 crc kubenswrapper[4634]: I0929 15:00:26.636603 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-qz252_92541952-adc1-4f55-a7c7-14d68fd9df0d/nmstate-console-plugin/0.log" Sep 29 15:00:26 crc kubenswrapper[4634]: I0929 15:00:26.673897 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-przz9_915d1f27-b652-4527-9df6-c1a1ee347d9d/nmstate-handler/0.log" Sep 29 15:00:26 crc kubenswrapper[4634]: I0929 15:00:26.909785 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-6h8nl_b4bdb338-a719-4d83-a12a-f0b18a589d65/kube-rbac-proxy/0.log" Sep 29 15:00:26 crc kubenswrapper[4634]: I0929 15:00:26.929462 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-6h8nl_b4bdb338-a719-4d83-a12a-f0b18a589d65/nmstate-metrics/0.log" Sep 29 15:00:27 crc kubenswrapper[4634]: I0929 15:00:27.132903 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-qnlfb_805b844b-fda9-431c-a652-d9c6211769f3/nmstate-operator/0.log" Sep 
29 15:00:27 crc kubenswrapper[4634]: I0929 15:00:27.264393 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-4hp98_a10543ec-dd79-4bc4-9330-ecca62f0dcde/nmstate-webhook/0.log" Sep 29 15:00:38 crc kubenswrapper[4634]: I0929 15:00:38.110638 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 15:00:38 crc kubenswrapper[4634]: E0929 15:00:38.111828 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:00:44 crc kubenswrapper[4634]: I0929 15:00:44.385823 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-vrscx_799ed420-430a-45c8-99a7-de9125bf452d/kube-rbac-proxy/0.log" Sep 29 15:00:44 crc kubenswrapper[4634]: I0929 15:00:44.516531 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-vrscx_799ed420-430a-45c8-99a7-de9125bf452d/controller/0.log" Sep 29 15:00:44 crc kubenswrapper[4634]: I0929 15:00:44.631922 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log" Sep 29 15:00:44 crc kubenswrapper[4634]: I0929 15:00:44.823461 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log" Sep 29 15:00:44 crc kubenswrapper[4634]: I0929 15:00:44.865548 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log" Sep 29 15:00:44 crc kubenswrapper[4634]: I0929 15:00:44.889721 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log" Sep 29 15:00:44 crc kubenswrapper[4634]: I0929 15:00:44.898211 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.173338 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.248253 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.272303 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.282211 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.519187 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.537917 4634 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/controller/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.601761 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.601798 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.891986 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/frr-metrics/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.916599 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/kube-rbac-proxy/0.log" Sep 29 15:00:45 crc kubenswrapper[4634]: I0929 15:00:45.976142 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/kube-rbac-proxy-frr/0.log" Sep 29 15:00:46 crc kubenswrapper[4634]: I0929 15:00:46.185295 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/reloader/0.log" Sep 29 15:00:46 crc kubenswrapper[4634]: I0929 15:00:46.319543 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-7772m_c4a4e6c8-9854-47b3-b11e-41a9c78334a8/frr-k8s-webhook-server/0.log" Sep 29 15:00:46 crc kubenswrapper[4634]: I0929 15:00:46.582130 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-64444f645d-qz74c_a7e55c36-0f57-469f-8419-b9ccb4465010/manager/0.log" Sep 29 15:00:47 crc kubenswrapper[4634]: I0929 15:00:47.039071 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5ff45f5c66-t6xh9_03429314-d17f-4ffa-9d58-b89748690fec/webhook-server/0.log" Sep 29 15:00:47 crc kubenswrapper[4634]: I0929 15:00:47.051980 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fbwzx_e5394208-75d4-4a32-98c2-16299c7bf5fa/kube-rbac-proxy/0.log" Sep 29 15:00:47 crc kubenswrapper[4634]: I0929 15:00:47.080584 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/frr/0.log" Sep 29 15:00:47 crc kubenswrapper[4634]: I0929 15:00:47.625626 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fbwzx_e5394208-75d4-4a32-98c2-16299c7bf5fa/speaker/0.log" Sep 29 15:00:53 crc kubenswrapper[4634]: I0929 15:00:53.110779 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 15:00:53 crc kubenswrapper[4634]: E0929 15:00:53.111921 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:00:54 crc kubenswrapper[4634]: I0929 15:00:54.486171 4634 scope.go:117] 
"RemoveContainer" containerID="74154ed93de8288ee30f5d88849bf40dd98c32004779b9939a6baec5c775ed8a" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.179924 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319301-qdwsk"] Sep 29 15:01:00 crc kubenswrapper[4634]: E0929 15:01:00.181021 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c03fa6f-627d-40d3-b9ce-5186793d5079" containerName="collect-profiles" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.181041 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c03fa6f-627d-40d3-b9ce-5186793d5079" containerName="collect-profiles" Sep 29 15:01:00 crc kubenswrapper[4634]: E0929 15:01:00.181063 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="extract-utilities" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.181159 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="extract-utilities" Sep 29 15:01:00 crc kubenswrapper[4634]: E0929 15:01:00.181208 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.181218 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" Sep 29 15:01:00 crc kubenswrapper[4634]: E0929 15:01:00.181264 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="extract-content" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.181274 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="extract-content" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.181539 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="34f68c08-f4cb-4426-bf1a-3016aa89add6" containerName="registry-server" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.181575 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c03fa6f-627d-40d3-b9ce-5186793d5079" containerName="collect-profiles" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.182516 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.211233 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319301-qdwsk"] Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.277580 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-combined-ca-bundle\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.277622 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrgjc\" (UniqueName: \"kubernetes.io/projected/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-kube-api-access-wrgjc\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.278067 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-config-data\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.278644 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-fernet-keys\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.381105 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-combined-ca-bundle\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.381153 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrgjc\" (UniqueName: \"kubernetes.io/projected/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-kube-api-access-wrgjc\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.381176 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-config-data\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.381350 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-fernet-keys\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.397273 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-config-data\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.398489 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-fernet-keys\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.406072 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-combined-ca-bundle\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.412147 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrgjc\" (UniqueName: \"kubernetes.io/projected/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-kube-api-access-wrgjc\") pod \"keystone-cron-29319301-qdwsk\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:00 crc kubenswrapper[4634]: I0929 15:01:00.513698 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:01 crc kubenswrapper[4634]: I0929 15:01:01.046027 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319301-qdwsk"] Sep 29 15:01:01 crc kubenswrapper[4634]: I0929 15:01:01.792438 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319301-qdwsk" event={"ID":"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0","Type":"ContainerStarted","Data":"4c60b60aeb68845a5577dbddf2122c7b9d1e2aed51b8470555ec144c0e638cdd"} Sep 29 15:01:01 crc kubenswrapper[4634]: I0929 15:01:01.792896 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319301-qdwsk" event={"ID":"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0","Type":"ContainerStarted","Data":"7e21f8ffe2185293c4027fd4b857edb119327ea0d03afafb071b259887ee049a"} Sep 29 15:01:01 crc kubenswrapper[4634]: I0929 15:01:01.830301 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319301-qdwsk" podStartSLOduration=1.8302799140000001 podStartE2EDuration="1.830279914s" podCreationTimestamp="2025-09-29 15:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 15:01:01.827153599 +0000 UTC m=+4592.395881368" watchObservedRunningTime="2025-09-29 15:01:01.830279914 +0000 UTC m=+4592.399007663" Sep 29 15:01:04 crc kubenswrapper[4634]: I0929 15:01:04.376528 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/util/0.log" Sep 29 15:01:05 crc kubenswrapper[4634]: I0929 15:01:05.459390 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/util/0.log" Sep 29 15:01:05 crc kubenswrapper[4634]: I0929 15:01:05.477183 4634 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/pull/0.log" Sep 29 15:01:05 crc kubenswrapper[4634]: I0929 15:01:05.510125 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/pull/0.log" Sep 29 15:01:05 crc kubenswrapper[4634]: I0929 15:01:05.807787 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/extract/0.log" Sep 29 15:01:05 crc kubenswrapper[4634]: I0929 15:01:05.808798 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/util/0.log" Sep 29 15:01:05 crc kubenswrapper[4634]: I0929 15:01:05.829756 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/pull/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.018392 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-utilities/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.110955 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 15:01:06 crc kubenswrapper[4634]: E0929 15:01:06.111272 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.254770 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-utilities/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.296472 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-content/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.323322 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-content/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.570894 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-utilities/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.591691 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-content/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.728178 4634 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/registry-server/0.log" Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.868275 4634 generic.go:334] "Generic (PLEG): container finished" podID="4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" containerID="4c60b60aeb68845a5577dbddf2122c7b9d1e2aed51b8470555ec144c0e638cdd" exitCode=0 Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.868714 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319301-qdwsk" event={"ID":"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0","Type":"ContainerDied","Data":"4c60b60aeb68845a5577dbddf2122c7b9d1e2aed51b8470555ec144c0e638cdd"} Sep 29 15:01:06 crc kubenswrapper[4634]: I0929 15:01:06.894794 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-utilities/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.086207 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-content/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.097272 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-utilities/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.174521 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-content/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.372106 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-utilities/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.373033 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-content/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.491893 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/util/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.755560 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/util/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.905161 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/pull/0.log" Sep 29 15:01:07 crc kubenswrapper[4634]: I0929 15:01:07.922015 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/pull/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.165718 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/pull/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.221726 4634 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/util/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.240529 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/extract/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.324604 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/registry-server/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.348722 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.462400 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-combined-ca-bundle\") pod \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.463525 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-fernet-keys\") pod \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.463590 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrgjc\" (UniqueName: \"kubernetes.io/projected/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-kube-api-access-wrgjc\") pod \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.463652 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-config-data\") pod \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\" (UID: \"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0\") " Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.475372 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-kube-api-access-wrgjc" (OuterVolumeSpecName: "kube-api-access-wrgjc") pod "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" (UID: "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0"). InnerVolumeSpecName "kube-api-access-wrgjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.486341 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" (UID: "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.530030 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-config-data" (OuterVolumeSpecName: "config-data") pod "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" (UID: "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.547454 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" (UID: "4cbe58eb-a475-4d6c-ac12-0122aeccf5b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.566716 4634 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.566763 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrgjc\" (UniqueName: \"kubernetes.io/projected/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-kube-api-access-wrgjc\") on node \"crc\" DevicePath \"\"" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.566776 4634 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.566785 4634 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cbe58eb-a475-4d6c-ac12-0122aeccf5b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.597839 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-pcm5r_a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9/marketplace-operator/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.662215 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-utilities/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.828000 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-utilities/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.852158 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-content/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.853953 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-content/0.log" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.887183 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319301-qdwsk" event={"ID":"4cbe58eb-a475-4d6c-ac12-0122aeccf5b0","Type":"ContainerDied","Data":"7e21f8ffe2185293c4027fd4b857edb119327ea0d03afafb071b259887ee049a"} Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.887236 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e21f8ffe2185293c4027fd4b857edb119327ea0d03afafb071b259887ee049a" Sep 29 15:01:08 crc kubenswrapper[4634]: I0929 15:01:08.887262 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319301-qdwsk" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.181837 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-utilities/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.251465 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/registry-server/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.275233 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-utilities/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.287289 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-content/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.476297 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-content/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.484322 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-utilities/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.530655 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-content/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.738629 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-utilities/0.log" Sep 29 15:01:09 crc kubenswrapper[4634]: I0929 15:01:09.836312 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-content/0.log" Sep 29 15:01:10 crc kubenswrapper[4634]: I0929 15:01:10.354743 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/registry-server/0.log" Sep 29 15:01:20 crc kubenswrapper[4634]: I0929 15:01:20.110225 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 15:01:21 crc kubenswrapper[4634]: I0929 15:01:21.024568 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"9f7cdaf4ee5f526676ee19fe2c57bacdee76388e7e71e9fb969e080ae3995dee"} Sep 29 15:01:46 crc kubenswrapper[4634]: E0929 15:01:46.305108 4634 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.129.56.93:51498->38.129.56.93:35553: write tcp 38.129.56.93:51498->38.129.56.93:35553: write: connection reset by peer Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.836583 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-74xr9"] Sep 29 15:02:12 crc kubenswrapper[4634]: E0929 15:02:12.837824 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" 
containerName="keystone-cron" Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.837843 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" containerName="keystone-cron" Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.838251 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cbe58eb-a475-4d6c-ac12-0122aeccf5b0" containerName="keystone-cron" Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.840193 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.854555 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-74xr9"] Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.928871 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-utilities\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.928919 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bd4x\" (UniqueName: \"kubernetes.io/projected/85a45de9-ada9-44a7-9dc1-654f054069d7-kube-api-access-2bd4x\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:12 crc kubenswrapper[4634]: I0929 15:02:12.929256 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-catalog-content\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.031137 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-utilities\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.031208 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bd4x\" (UniqueName: \"kubernetes.io/projected/85a45de9-ada9-44a7-9dc1-654f054069d7-kube-api-access-2bd4x\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.031263 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-catalog-content\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.031932 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-utilities\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " 
pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.031973 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-catalog-content\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.060297 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bd4x\" (UniqueName: \"kubernetes.io/projected/85a45de9-ada9-44a7-9dc1-654f054069d7-kube-api-access-2bd4x\") pod \"community-operators-74xr9\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.171620 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:13 crc kubenswrapper[4634]: I0929 15:02:13.881555 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-74xr9"] Sep 29 15:02:14 crc kubenswrapper[4634]: I0929 15:02:14.655286 4634 generic.go:334] "Generic (PLEG): container finished" podID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerID="97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030" exitCode=0 Sep 29 15:02:14 crc kubenswrapper[4634]: I0929 15:02:14.656287 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74xr9" event={"ID":"85a45de9-ada9-44a7-9dc1-654f054069d7","Type":"ContainerDied","Data":"97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030"} Sep 29 15:02:14 crc kubenswrapper[4634]: I0929 15:02:14.656350 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74xr9" event={"ID":"85a45de9-ada9-44a7-9dc1-654f054069d7","Type":"ContainerStarted","Data":"e5bf407d1d3421ab14715cfbca836c3c32c52b2e87805b8f6582741a13c4465d"} Sep 29 15:02:14 crc kubenswrapper[4634]: I0929 15:02:14.661861 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 15:02:16 crc kubenswrapper[4634]: I0929 15:02:16.690585 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74xr9" event={"ID":"85a45de9-ada9-44a7-9dc1-654f054069d7","Type":"ContainerStarted","Data":"ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec"} Sep 29 15:02:17 crc kubenswrapper[4634]: I0929 15:02:17.705345 4634 generic.go:334] "Generic (PLEG): container finished" podID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerID="ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec" exitCode=0 Sep 29 15:02:17 crc kubenswrapper[4634]: I0929 15:02:17.705424 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74xr9" event={"ID":"85a45de9-ada9-44a7-9dc1-654f054069d7","Type":"ContainerDied","Data":"ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec"} Sep 29 15:02:18 crc kubenswrapper[4634]: I0929 15:02:18.749638 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74xr9" event={"ID":"85a45de9-ada9-44a7-9dc1-654f054069d7","Type":"ContainerStarted","Data":"3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39"} Sep 29 15:02:18 crc kubenswrapper[4634]: I0929 
15:02:18.781948 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-74xr9" podStartSLOduration=3.234879142 podStartE2EDuration="6.781921459s" podCreationTimestamp="2025-09-29 15:02:12 +0000 UTC" firstStartedPulling="2025-09-29 15:02:14.661361253 +0000 UTC m=+4665.230089002" lastFinishedPulling="2025-09-29 15:02:18.20840357 +0000 UTC m=+4668.777131319" observedRunningTime="2025-09-29 15:02:18.774460365 +0000 UTC m=+4669.343188124" watchObservedRunningTime="2025-09-29 15:02:18.781921459 +0000 UTC m=+4669.350649208" Sep 29 15:02:23 crc kubenswrapper[4634]: I0929 15:02:23.173171 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:23 crc kubenswrapper[4634]: I0929 15:02:23.173985 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:23 crc kubenswrapper[4634]: I0929 15:02:23.229575 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:23 crc kubenswrapper[4634]: I0929 15:02:23.883197 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:23 crc kubenswrapper[4634]: I0929 15:02:23.957870 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-74xr9"] Sep 29 15:02:25 crc kubenswrapper[4634]: I0929 15:02:25.836549 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-74xr9" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="registry-server" containerID="cri-o://3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39" gracePeriod=2 Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.382126 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.403918 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bd4x\" (UniqueName: \"kubernetes.io/projected/85a45de9-ada9-44a7-9dc1-654f054069d7-kube-api-access-2bd4x\") pod \"85a45de9-ada9-44a7-9dc1-654f054069d7\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.404171 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-catalog-content\") pod \"85a45de9-ada9-44a7-9dc1-654f054069d7\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.404266 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-utilities\") pod \"85a45de9-ada9-44a7-9dc1-654f054069d7\" (UID: \"85a45de9-ada9-44a7-9dc1-654f054069d7\") " Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.406073 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-utilities" (OuterVolumeSpecName: "utilities") pod "85a45de9-ada9-44a7-9dc1-654f054069d7" (UID: "85a45de9-ada9-44a7-9dc1-654f054069d7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.417988 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85a45de9-ada9-44a7-9dc1-654f054069d7-kube-api-access-2bd4x" (OuterVolumeSpecName: "kube-api-access-2bd4x") pod "85a45de9-ada9-44a7-9dc1-654f054069d7" (UID: "85a45de9-ada9-44a7-9dc1-654f054069d7"). InnerVolumeSpecName "kube-api-access-2bd4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.485268 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "85a45de9-ada9-44a7-9dc1-654f054069d7" (UID: "85a45de9-ada9-44a7-9dc1-654f054069d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.507727 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.507777 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/85a45de9-ada9-44a7-9dc1-654f054069d7-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.507791 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bd4x\" (UniqueName: \"kubernetes.io/projected/85a45de9-ada9-44a7-9dc1-654f054069d7-kube-api-access-2bd4x\") on node \"crc\" DevicePath \"\"" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.870045 4634 generic.go:334] "Generic (PLEG): container finished" podID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerID="3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39" exitCode=0 Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.870148 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74xr9" event={"ID":"85a45de9-ada9-44a7-9dc1-654f054069d7","Type":"ContainerDied","Data":"3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39"} Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.870218 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-74xr9" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.870253 4634 scope.go:117] "RemoveContainer" containerID="3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.870232 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-74xr9" event={"ID":"85a45de9-ada9-44a7-9dc1-654f054069d7","Type":"ContainerDied","Data":"e5bf407d1d3421ab14715cfbca836c3c32c52b2e87805b8f6582741a13c4465d"} Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.928793 4634 scope.go:117] "RemoveContainer" containerID="ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec" Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.948673 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-74xr9"] Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.959238 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-74xr9"] Sep 29 15:02:26 crc kubenswrapper[4634]: I0929 15:02:26.989743 4634 scope.go:117] "RemoveContainer" containerID="97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030" Sep 29 15:02:27 crc kubenswrapper[4634]: I0929 15:02:27.012525 4634 scope.go:117] "RemoveContainer" containerID="3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39" Sep 29 15:02:27 crc kubenswrapper[4634]: E0929 15:02:27.012971 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39\": container with ID starting with 3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39 not found: ID does not exist" containerID="3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39" Sep 29 15:02:27 crc kubenswrapper[4634]: I0929 15:02:27.013021 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39"} err="failed to get container status \"3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39\": rpc error: code = NotFound desc = could not find container \"3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39\": container with ID starting with 3e129c7a2f85f7f88c9c660389824928cd9c091c5881d1b76c7d772acd78ae39 not found: ID does not exist" Sep 29 15:02:27 crc kubenswrapper[4634]: I0929 15:02:27.013073 4634 scope.go:117] "RemoveContainer" containerID="ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec" Sep 29 15:02:27 crc kubenswrapper[4634]: E0929 15:02:27.013374 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec\": container with ID starting with ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec not found: ID does not exist" containerID="ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec" Sep 29 15:02:27 crc kubenswrapper[4634]: I0929 15:02:27.013411 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec"} err="failed to get container status \"ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec\": rpc error: code = NotFound desc = could not find 
container \"ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec\": container with ID starting with ff093024246f31c1b96477e6d75ad593be5bf9137ddd1dc7d247c2a3e9858fec not found: ID does not exist" Sep 29 15:02:27 crc kubenswrapper[4634]: I0929 15:02:27.013430 4634 scope.go:117] "RemoveContainer" containerID="97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030" Sep 29 15:02:27 crc kubenswrapper[4634]: E0929 15:02:27.013809 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030\": container with ID starting with 97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030 not found: ID does not exist" containerID="97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030" Sep 29 15:02:27 crc kubenswrapper[4634]: I0929 15:02:27.013869 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030"} err="failed to get container status \"97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030\": rpc error: code = NotFound desc = could not find container \"97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030\": container with ID starting with 97e7f825767be312aa666ef491f6bc03016e8823e2e89926136187ef2f9c5030 not found: ID does not exist" Sep 29 15:02:28 crc kubenswrapper[4634]: I0929 15:02:28.129996 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" path="/var/lib/kubelet/pods/85a45de9-ada9-44a7-9dc1-654f054069d7/volumes" Sep 29 15:03:44 crc kubenswrapper[4634]: I0929 15:03:44.396177 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 15:03:44 crc kubenswrapper[4634]: I0929 15:03:44.397108 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 15:04:14 crc kubenswrapper[4634]: I0929 15:04:14.395980 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 15:04:14 crc kubenswrapper[4634]: I0929 15:04:14.398760 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 15:04:18 crc kubenswrapper[4634]: I0929 15:04:18.234772 4634 generic.go:334] "Generic (PLEG): container finished" podID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerID="b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484" exitCode=0 Sep 29 15:04:18 crc kubenswrapper[4634]: I0929 15:04:18.234884 4634 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" event={"ID":"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14","Type":"ContainerDied","Data":"b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484"} Sep 29 15:04:18 crc kubenswrapper[4634]: I0929 15:04:18.236655 4634 scope.go:117] "RemoveContainer" containerID="b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484" Sep 29 15:04:18 crc kubenswrapper[4634]: I0929 15:04:18.517039 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mfrz5_must-gather-fh6d4_a8310b7e-bfaa-44b7-8e06-34c82ecc5c14/gather/0.log" Sep 29 15:04:27 crc kubenswrapper[4634]: I0929 15:04:27.345007 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mfrz5/must-gather-fh6d4"] Sep 29 15:04:27 crc kubenswrapper[4634]: I0929 15:04:27.345969 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerName="copy" containerID="cri-o://a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2" gracePeriod=2 Sep 29 15:04:27 crc kubenswrapper[4634]: I0929 15:04:27.354842 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mfrz5/must-gather-fh6d4"] Sep 29 15:04:27 crc kubenswrapper[4634]: I0929 15:04:27.806356 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mfrz5_must-gather-fh6d4_a8310b7e-bfaa-44b7-8e06-34c82ecc5c14/copy/0.log" Sep 29 15:04:27 crc kubenswrapper[4634]: I0929 15:04:27.807610 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" Sep 29 15:04:27 crc kubenswrapper[4634]: I0929 15:04:27.879094 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-must-gather-output\") pod \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " Sep 29 15:04:27 crc kubenswrapper[4634]: I0929 15:04:27.879299 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7qnm\" (UniqueName: \"kubernetes.io/projected/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-kube-api-access-h7qnm\") pod \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\" (UID: \"a8310b7e-bfaa-44b7-8e06-34c82ecc5c14\") " Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.074480 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" (UID: "a8310b7e-bfaa-44b7-8e06-34c82ecc5c14"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.085316 4634 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.377709 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-kube-api-access-h7qnm" (OuterVolumeSpecName: "kube-api-access-h7qnm") pod "a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" (UID: "a8310b7e-bfaa-44b7-8e06-34c82ecc5c14"). 
InnerVolumeSpecName "kube-api-access-h7qnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.402646 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7qnm\" (UniqueName: \"kubernetes.io/projected/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14-kube-api-access-h7qnm\") on node \"crc\" DevicePath \"\"" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.422181 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mfrz5_must-gather-fh6d4_a8310b7e-bfaa-44b7-8e06-34c82ecc5c14/copy/0.log" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.426726 4634 generic.go:334] "Generic (PLEG): container finished" podID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerID="a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2" exitCode=143 Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.426811 4634 scope.go:117] "RemoveContainer" containerID="a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.427020 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mfrz5/must-gather-fh6d4" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.513450 4634 scope.go:117] "RemoveContainer" containerID="b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.698936 4634 scope.go:117] "RemoveContainer" containerID="a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2" Sep 29 15:04:28 crc kubenswrapper[4634]: E0929 15:04:28.699946 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2\": container with ID starting with a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2 not found: ID does not exist" containerID="a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.699980 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2"} err="failed to get container status \"a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2\": rpc error: code = NotFound desc = could not find container \"a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2\": container with ID starting with a8c075ae694a09d9dab42c916d34e67aed3790a5a03e169da56b44ebad4788d2 not found: ID does not exist" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.700007 4634 scope.go:117] "RemoveContainer" containerID="b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484" Sep 29 15:04:28 crc kubenswrapper[4634]: E0929 15:04:28.700625 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484\": container with ID starting with b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484 not found: ID does not exist" containerID="b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484" Sep 29 15:04:28 crc kubenswrapper[4634]: I0929 15:04:28.700707 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484"} err="failed to get 
container status \"b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484\": rpc error: code = NotFound desc = could not find container \"b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484\": container with ID starting with b0cded0d4d14cfa7fcdbc12af88c23cb318e957db628231ce45b0d35d0ab0484 not found: ID does not exist" Sep 29 15:04:30 crc kubenswrapper[4634]: I0929 15:04:30.128945 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" path="/var/lib/kubelet/pods/a8310b7e-bfaa-44b7-8e06-34c82ecc5c14/volumes" Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.395869 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.396877 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.396947 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.398060 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9f7cdaf4ee5f526676ee19fe2c57bacdee76388e7e71e9fb969e080ae3995dee"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.398191 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://9f7cdaf4ee5f526676ee19fe2c57bacdee76388e7e71e9fb969e080ae3995dee" gracePeriod=600 Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.676286 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="9f7cdaf4ee5f526676ee19fe2c57bacdee76388e7e71e9fb969e080ae3995dee" exitCode=0 Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.676661 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"9f7cdaf4ee5f526676ee19fe2c57bacdee76388e7e71e9fb969e080ae3995dee"} Sep 29 15:04:44 crc kubenswrapper[4634]: I0929 15:04:44.676724 4634 scope.go:117] "RemoveContainer" containerID="6d6fed4034210dc7813f5014e5789c221cdc5e70ef40f9f4df327ff3481c0d59" Sep 29 15:04:45 crc kubenswrapper[4634]: I0929 15:04:45.690857 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"} Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.588147 4634 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xxphc/must-gather-htqjd"] Sep 29 15:05:06 crc kubenswrapper[4634]: E0929 15:05:06.589227 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="registry-server" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589243 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="registry-server" Sep 29 15:05:06 crc kubenswrapper[4634]: E0929 15:05:06.589276 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerName="gather" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589283 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerName="gather" Sep 29 15:05:06 crc kubenswrapper[4634]: E0929 15:05:06.589299 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="extract-content" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589307 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="extract-content" Sep 29 15:05:06 crc kubenswrapper[4634]: E0929 15:05:06.589326 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerName="copy" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589333 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerName="copy" Sep 29 15:05:06 crc kubenswrapper[4634]: E0929 15:05:06.589356 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="extract-utilities" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589363 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="extract-utilities" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589607 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerName="copy" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589618 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8310b7e-bfaa-44b7-8e06-34c82ecc5c14" containerName="gather" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.589633 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="85a45de9-ada9-44a7-9dc1-654f054069d7" containerName="registry-server" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.590689 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.597674 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xxphc"/"kube-root-ca.crt" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.597695 4634 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xxphc"/"openshift-service-ca.crt" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.597674 4634 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xxphc"/"default-dockercfg-8npdx" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.618248 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xxphc/must-gather-htqjd"] Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.694780 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52bvj\" (UniqueName: \"kubernetes.io/projected/4472a8eb-a763-4645-92df-233d8d34eb71-kube-api-access-52bvj\") pod \"must-gather-htqjd\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.694888 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4472a8eb-a763-4645-92df-233d8d34eb71-must-gather-output\") pod \"must-gather-htqjd\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.797714 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4472a8eb-a763-4645-92df-233d8d34eb71-must-gather-output\") pod \"must-gather-htqjd\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.797889 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52bvj\" (UniqueName: \"kubernetes.io/projected/4472a8eb-a763-4645-92df-233d8d34eb71-kube-api-access-52bvj\") pod \"must-gather-htqjd\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.798365 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4472a8eb-a763-4645-92df-233d8d34eb71-must-gather-output\") pod \"must-gather-htqjd\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.827427 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52bvj\" (UniqueName: \"kubernetes.io/projected/4472a8eb-a763-4645-92df-233d8d34eb71-kube-api-access-52bvj\") pod \"must-gather-htqjd\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:06 crc kubenswrapper[4634]: I0929 15:05:06.912506 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:05:07 crc kubenswrapper[4634]: I0929 15:05:07.485079 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xxphc/must-gather-htqjd"] Sep 29 15:05:07 crc kubenswrapper[4634]: W0929 15:05:07.495328 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4472a8eb_a763_4645_92df_233d8d34eb71.slice/crio-6e41dfd9c52ea03d05b5b5261cd25b1a0498c13c7190dfc3150e6070c1a47ad5 WatchSource:0}: Error finding container 6e41dfd9c52ea03d05b5b5261cd25b1a0498c13c7190dfc3150e6070c1a47ad5: Status 404 returned error can't find the container with id 6e41dfd9c52ea03d05b5b5261cd25b1a0498c13c7190dfc3150e6070c1a47ad5 Sep 29 15:05:07 crc kubenswrapper[4634]: I0929 15:05:07.994054 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/must-gather-htqjd" event={"ID":"4472a8eb-a763-4645-92df-233d8d34eb71","Type":"ContainerStarted","Data":"5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99"} Sep 29 15:05:07 crc kubenswrapper[4634]: I0929 15:05:07.994659 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/must-gather-htqjd" event={"ID":"4472a8eb-a763-4645-92df-233d8d34eb71","Type":"ContainerStarted","Data":"6e41dfd9c52ea03d05b5b5261cd25b1a0498c13c7190dfc3150e6070c1a47ad5"} Sep 29 15:05:09 crc kubenswrapper[4634]: I0929 15:05:09.016555 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/must-gather-htqjd" event={"ID":"4472a8eb-a763-4645-92df-233d8d34eb71","Type":"ContainerStarted","Data":"93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768"} Sep 29 15:05:09 crc kubenswrapper[4634]: I0929 15:05:09.044499 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xxphc/must-gather-htqjd" podStartSLOduration=3.044476705 podStartE2EDuration="3.044476705s" podCreationTimestamp="2025-09-29 15:05:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 15:05:09.036172147 +0000 UTC m=+4839.604899906" watchObservedRunningTime="2025-09-29 15:05:09.044476705 +0000 UTC m=+4839.613204454" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.091795 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xxphc/crc-debug-sw2h2"] Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.095165 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.152931 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-host\") pod \"crc-debug-sw2h2\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.153278 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc5cf\" (UniqueName: \"kubernetes.io/projected/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-kube-api-access-nc5cf\") pod \"crc-debug-sw2h2\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.255266 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-host\") pod \"crc-debug-sw2h2\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.255414 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc5cf\" (UniqueName: \"kubernetes.io/projected/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-kube-api-access-nc5cf\") pod \"crc-debug-sw2h2\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.255471 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-host\") pod \"crc-debug-sw2h2\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.289269 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc5cf\" (UniqueName: \"kubernetes.io/projected/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-kube-api-access-nc5cf\") pod \"crc-debug-sw2h2\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: I0929 15:05:12.439659 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:05:12 crc kubenswrapper[4634]: W0929 15:05:12.497527 4634 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddde4a4e4_6024_4741_b9c0_4b8b85d51e5b.slice/crio-1efd7c8776ad99cc33c0f9af89566d57a9e3bbba4c62de07567dfb9a5474d64f WatchSource:0}: Error finding container 1efd7c8776ad99cc33c0f9af89566d57a9e3bbba4c62de07567dfb9a5474d64f: Status 404 returned error can't find the container with id 1efd7c8776ad99cc33c0f9af89566d57a9e3bbba4c62de07567dfb9a5474d64f Sep 29 15:05:13 crc kubenswrapper[4634]: I0929 15:05:13.079118 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" event={"ID":"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b","Type":"ContainerStarted","Data":"b63fa377934a07b03ba96a280f33faca2c47ad403de5b8df4139bc3c1c81f5e1"} Sep 29 15:05:13 crc kubenswrapper[4634]: I0929 15:05:13.079616 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" event={"ID":"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b","Type":"ContainerStarted","Data":"1efd7c8776ad99cc33c0f9af89566d57a9e3bbba4c62de07567dfb9a5474d64f"} Sep 29 15:05:13 crc kubenswrapper[4634]: I0929 15:05:13.104727 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" podStartSLOduration=1.104703959 podStartE2EDuration="1.104703959s" podCreationTimestamp="2025-09-29 15:05:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 15:05:13.102073557 +0000 UTC m=+4843.670801306" watchObservedRunningTime="2025-09-29 15:05:13.104703959 +0000 UTC m=+4843.673431708" Sep 29 15:05:54 crc kubenswrapper[4634]: I0929 15:05:54.782950 4634 scope.go:117] "RemoveContainer" containerID="aeea3934f3bc6c933e75f8987521e98de9912bb74a6f2860222ec76d8921f6ce" Sep 29 15:06:44 crc kubenswrapper[4634]: I0929 15:06:44.396142 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 15:06:44 crc kubenswrapper[4634]: I0929 15:06:44.397404 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 15:06:52 crc kubenswrapper[4634]: I0929 15:06:52.225050 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-77c768456b-27trs_4a923c46-c064-4dbd-b91d-cc1379e39d35/barbican-api-log/0.log" Sep 29 15:06:52 crc kubenswrapper[4634]: I0929 15:06:52.236811 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-77c768456b-27trs_4a923c46-c064-4dbd-b91d-cc1379e39d35/barbican-api/0.log" Sep 29 15:06:52 crc kubenswrapper[4634]: I0929 15:06:52.531273 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5658b5d69b-mlcxf_c1e86a12-7d7e-4bbe-bcf2-030f754a91a2/barbican-keystone-listener-log/0.log" Sep 29 15:06:52 crc kubenswrapper[4634]: I0929 15:06:52.613517 4634 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-5658b5d69b-mlcxf_c1e86a12-7d7e-4bbe-bcf2-030f754a91a2/barbican-keystone-listener/0.log" Sep 29 15:06:52 crc kubenswrapper[4634]: I0929 15:06:52.816918 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-645d46567c-ngxbx_c697d1f5-42d0-4a87-9704-64a6e1406db1/barbican-worker/0.log" Sep 29 15:06:52 crc kubenswrapper[4634]: I0929 15:06:52.896939 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-645d46567c-ngxbx_c697d1f5-42d0-4a87-9704-64a6e1406db1/barbican-worker-log/0.log" Sep 29 15:06:53 crc kubenswrapper[4634]: I0929 15:06:53.145042 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6pbxf_5e3a5bac-db09-4bee-bc1a-a93841ada5ed/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:06:53 crc kubenswrapper[4634]: I0929 15:06:53.493797 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/ceilometer-notification-agent/0.log" Sep 29 15:06:53 crc kubenswrapper[4634]: I0929 15:06:53.495931 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/ceilometer-central-agent/0.log" Sep 29 15:06:53 crc kubenswrapper[4634]: I0929 15:06:53.595537 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/proxy-httpd/0.log" Sep 29 15:06:53 crc kubenswrapper[4634]: I0929 15:06:53.687686 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_2985415a-1cb2-4f9a-9a10-c615ddb91dbd/sg-core/0.log" Sep 29 15:06:53 crc kubenswrapper[4634]: I0929 15:06:53.881971 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8739a6eb-884d-49c7-8ff9-e44b56575552/cinder-api/0.log" Sep 29 15:06:53 crc kubenswrapper[4634]: I0929 15:06:53.910432 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8739a6eb-884d-49c7-8ff9-e44b56575552/cinder-api-log/0.log" Sep 29 15:06:54 crc kubenswrapper[4634]: I0929 15:06:54.239810 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_03850223-163a-4eca-a290-1d072a2b535d/cinder-scheduler/0.log" Sep 29 15:06:54 crc kubenswrapper[4634]: I0929 15:06:54.259261 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_03850223-163a-4eca-a290-1d072a2b535d/probe/0.log" Sep 29 15:06:54 crc kubenswrapper[4634]: I0929 15:06:54.823830 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-lt5lc_d8fe0f99-6eea-49ca-bf34-fd88555c84ec/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:06:55 crc kubenswrapper[4634]: I0929 15:06:55.061924 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-v8shz_332f4970-1479-4efc-8b35-e1795111b1b4/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:06:55 crc kubenswrapper[4634]: I0929 15:06:55.109370 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-jbsbr_8d1a9c90-4eaf-4553-b80b-2d608c11af9a/init/0.log" Sep 29 15:06:55 crc kubenswrapper[4634]: I0929 15:06:55.443425 4634 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-jbsbr_8d1a9c90-4eaf-4553-b80b-2d608c11af9a/init/0.log" Sep 29 15:06:55 crc kubenswrapper[4634]: I0929 15:06:55.628012 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7b659bdd7f-jbsbr_8d1a9c90-4eaf-4553-b80b-2d608c11af9a/dnsmasq-dns/0.log" Sep 29 15:06:55 crc kubenswrapper[4634]: I0929 15:06:55.853191 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-xjfv2_268175fe-c76e-4032-8027-db49b1355ec7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:06:55 crc kubenswrapper[4634]: I0929 15:06:55.877120 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_961f670c-7ab6-42b3-8fa1-b5494af46245/glance-httpd/0.log" Sep 29 15:06:56 crc kubenswrapper[4634]: I0929 15:06:56.211619 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_961f670c-7ab6-42b3-8fa1-b5494af46245/glance-log/0.log" Sep 29 15:06:56 crc kubenswrapper[4634]: I0929 15:06:56.216348 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3dc03219-407d-4010-9c0f-5bbf4d94da6a/glance-httpd/0.log" Sep 29 15:06:56 crc kubenswrapper[4634]: I0929 15:06:56.258137 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3dc03219-407d-4010-9c0f-5bbf4d94da6a/glance-log/0.log" Sep 29 15:06:57 crc kubenswrapper[4634]: I0929 15:06:57.089701 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5d5866c49b-9tt6g_24cc4bfc-123a-479d-afb7-ca6b62cd7754/horizon/0.log" Sep 29 15:06:57 crc kubenswrapper[4634]: I0929 15:06:57.425410 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-2r7hb_c2fcbf70-369d-41cd-8187-7e26848b9171/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:06:57 crc kubenswrapper[4634]: I0929 15:06:57.484230 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-5d5866c49b-9tt6g_24cc4bfc-123a-479d-afb7-ca6b62cd7754/horizon-log/0.log" Sep 29 15:06:57 crc kubenswrapper[4634]: I0929 15:06:57.528410 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-zs264_ca087dcb-6346-46ed-9750-b5548355305a/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:06:57 crc kubenswrapper[4634]: I0929 15:06:57.766577 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319301-qdwsk_4cbe58eb-a475-4d6c-ac12-0122aeccf5b0/keystone-cron/0.log" Sep 29 15:06:58 crc kubenswrapper[4634]: I0929 15:06:58.064128 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_d13e0663-c00d-4276-be1d-fc570182e28a/kube-state-metrics/0.log" Sep 29 15:06:58 crc kubenswrapper[4634]: I0929 15:06:58.197382 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-587bf8586b-wjkjk_f725a01f-c382-4260-8e4e-e530d7c0ed82/keystone-api/0.log" Sep 29 15:06:58 crc kubenswrapper[4634]: I0929 15:06:58.479445 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-6d6hs_a38e3e32-cd47-4afd-aa38-da7911b1a12f/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:00 crc kubenswrapper[4634]: I0929 15:07:00.182346 4634 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_neutron-7f758ffcf7-qsxtz_2e0453ac-a888-4906-8a1f-9ba9a0f797e0/neutron-httpd/0.log" Sep 29 15:07:00 crc kubenswrapper[4634]: I0929 15:07:00.368828 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-7f758ffcf7-qsxtz_2e0453ac-a888-4906-8a1f-9ba9a0f797e0/neutron-api/0.log" Sep 29 15:07:00 crc kubenswrapper[4634]: I0929 15:07:00.460003 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-fdzkg_b183018d-383a-4d89-bb1f-d5c1f13404a9/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:00 crc kubenswrapper[4634]: I0929 15:07:00.558475 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_76a2a736-1945-4e7f-955e-e5c33004d4df/memcached/0.log" Sep 29 15:07:01 crc kubenswrapper[4634]: I0929 15:07:01.591457 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ad87c2fd-79c4-4931-99d6-bec867ee637e/nova-cell0-conductor-conductor/0.log" Sep 29 15:07:01 crc kubenswrapper[4634]: I0929 15:07:01.839827 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_c0eded61-0572-44d1-8d17-78191173c99f/nova-cell1-conductor-conductor/0.log" Sep 29 15:07:02 crc kubenswrapper[4634]: I0929 15:07:02.238546 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3eef7d23-1a73-41a3-b80a-d0be5789f09d/nova-api-log/0.log" Sep 29 15:07:02 crc kubenswrapper[4634]: I0929 15:07:02.364997 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3eef7d23-1a73-41a3-b80a-d0be5789f09d/nova-api-api/0.log" Sep 29 15:07:02 crc kubenswrapper[4634]: I0929 15:07:02.393818 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_6bb80ea7-a2f1-4eeb-9205-722fda8a48b2/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 15:07:02 crc kubenswrapper[4634]: I0929 15:07:02.518023 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-x58xd_4dbe661a-c031-4716-9816-d5cb05957a35/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:02 crc kubenswrapper[4634]: I0929 15:07:02.952143 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6b03940d-5de0-4326-b16f-c436f6637a92/nova-metadata-log/0.log" Sep 29 15:07:03 crc kubenswrapper[4634]: I0929 15:07:03.370814 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5dcf49a2-dd23-4b67-9f54-4659168f4f18/mysql-bootstrap/0.log" Sep 29 15:07:03 crc kubenswrapper[4634]: I0929 15:07:03.681501 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_8fca2b6d-9b01-4498-bdc1-619d2b52d173/nova-scheduler-scheduler/0.log" Sep 29 15:07:03 crc kubenswrapper[4634]: I0929 15:07:03.743745 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5dcf49a2-dd23-4b67-9f54-4659168f4f18/mysql-bootstrap/0.log" Sep 29 15:07:03 crc kubenswrapper[4634]: I0929 15:07:03.912406 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_5dcf49a2-dd23-4b67-9f54-4659168f4f18/galera/0.log" Sep 29 15:07:04 crc kubenswrapper[4634]: I0929 15:07:04.122891 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_861151f8-60ad-449e-80fa-b1b64e5c5b3e/mysql-bootstrap/0.log" Sep 29 15:07:04 crc 
kubenswrapper[4634]: I0929 15:07:04.459808 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_861151f8-60ad-449e-80fa-b1b64e5c5b3e/galera/0.log" Sep 29 15:07:04 crc kubenswrapper[4634]: I0929 15:07:04.490478 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_861151f8-60ad-449e-80fa-b1b64e5c5b3e/mysql-bootstrap/0.log" Sep 29 15:07:04 crc kubenswrapper[4634]: I0929 15:07:04.542653 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_6b03940d-5de0-4326-b16f-c436f6637a92/nova-metadata-metadata/0.log" Sep 29 15:07:04 crc kubenswrapper[4634]: I0929 15:07:04.965189 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_041479d7-0e40-4b0c-b301-f79c133394dc/openstackclient/0.log" Sep 29 15:07:05 crc kubenswrapper[4634]: I0929 15:07:05.243681 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-lfvq4_07a47ca0-1cd2-4e8d-92ce-37083cde3744/ovn-controller/0.log" Sep 29 15:07:05 crc kubenswrapper[4634]: I0929 15:07:05.278308 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-7bt9h_5a821c1a-6f5a-47af-bbe6-072b2a2a8033/openstack-network-exporter/0.log" Sep 29 15:07:05 crc kubenswrapper[4634]: I0929 15:07:05.480144 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovsdb-server-init/0.log" Sep 29 15:07:05 crc kubenswrapper[4634]: I0929 15:07:05.658789 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovsdb-server-init/0.log" Sep 29 15:07:05 crc kubenswrapper[4634]: I0929 15:07:05.690779 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovs-vswitchd/0.log" Sep 29 15:07:05 crc kubenswrapper[4634]: I0929 15:07:05.773337 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-8xcvg_b8867c3c-d76b-4687-a044-15ba4e9b2dc2/ovsdb-server/0.log" Sep 29 15:07:05 crc kubenswrapper[4634]: I0929 15:07:05.881632 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-ztr49_4ffd6a52-c5fb-4796-b98b-c5ca2a238a41/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:06 crc kubenswrapper[4634]: I0929 15:07:06.042956 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0288fa06-e56a-4201-a883-d1ece43562ac/openstack-network-exporter/0.log" Sep 29 15:07:06 crc kubenswrapper[4634]: I0929 15:07:06.071657 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0288fa06-e56a-4201-a883-d1ece43562ac/ovn-northd/0.log" Sep 29 15:07:06 crc kubenswrapper[4634]: I0929 15:07:06.241677 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d077753a-f890-4c33-9d24-d96f3b6117f3/openstack-network-exporter/0.log" Sep 29 15:07:06 crc kubenswrapper[4634]: I0929 15:07:06.328216 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_d077753a-f890-4c33-9d24-d96f3b6117f3/ovsdbserver-nb/0.log" Sep 29 15:07:06 crc kubenswrapper[4634]: I0929 15:07:06.429229 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8ff4fb7c-c525-4c15-941e-4b8980a5b140/openstack-network-exporter/0.log" Sep 29 15:07:06 crc 
kubenswrapper[4634]: I0929 15:07:06.433222 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_8ff4fb7c-c525-4c15-941e-4b8980a5b140/ovsdbserver-sb/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.220368 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5bfb7db698-tmn8x_2b38f115-526d-4093-b79c-19e6b9258dbf/placement-api/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.269346 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5bfb7db698-tmn8x_2b38f115-526d-4093-b79c-19e6b9258dbf/placement-log/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.357019 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_51c0f162-132f-48c2-8e8a-65c4c4d69c69/setup-container/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.611807 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_51c0f162-132f-48c2-8e8a-65c4c4d69c69/setup-container/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.639838 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_51c0f162-132f-48c2-8e8a-65c4c4d69c69/rabbitmq/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.692212 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_63bda06a-11bd-41fc-b988-30f1aa86b490/setup-container/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.865275 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_63bda06a-11bd-41fc-b988-30f1aa86b490/setup-container/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.940214 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_63bda06a-11bd-41fc-b988-30f1aa86b490/rabbitmq/0.log" Sep 29 15:07:07 crc kubenswrapper[4634]: I0929 15:07:07.947580 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-rg8mk_652c8902-8b97-4a81-8c05-10b0702d1c68/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:08 crc kubenswrapper[4634]: I0929 15:07:08.159237 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-phrjw_8043163a-b8ee-4991-9edb-8c7522be414e/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:08 crc kubenswrapper[4634]: I0929 15:07:08.214880 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-rmxrj_61b3895b-871d-4318-8fb4-4426fcd6611a/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:08 crc kubenswrapper[4634]: I0929 15:07:08.473519 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-b7fm4_c35a01f0-4d25-41ad-8eff-9d65bbb2fa01/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:08 crc kubenswrapper[4634]: I0929 15:07:08.787149 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-9fkzs_ee18fe6a-41c7-471a-8f99-0cec5b0a2676/ssh-known-hosts-edpm-deployment/0.log" Sep 29 15:07:08 crc kubenswrapper[4634]: I0929 15:07:08.837182 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-847d5655ff-zzkf2_eb9f537d-9c82-4675-aeaf-c0e4656a1330/proxy-httpd/0.log" Sep 29 15:07:08 crc kubenswrapper[4634]: 
I0929 15:07:08.877317 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-847d5655ff-zzkf2_eb9f537d-9c82-4675-aeaf-c0e4656a1330/proxy-server/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.012538 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-9q4mg_6ad020b7-a243-46de-8a47-2bb8af6042a0/swift-ring-rebalance/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.108618 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-auditor/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.259672 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-reaper/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.309554 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-replicator/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.388453 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/account-server/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.408571 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-auditor/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.598157 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-replicator/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.638854 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-server/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.641842 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-auditor/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.673759 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/container-updater/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.835663 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-replicator/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.870930 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-expirer/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.913850 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-updater/0.log" Sep 29 15:07:09 crc kubenswrapper[4634]: I0929 15:07:09.941031 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/object-server/0.log" Sep 29 15:07:10 crc kubenswrapper[4634]: I0929 15:07:10.055943 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/rsync/0.log" Sep 29 15:07:10 crc kubenswrapper[4634]: I0929 15:07:10.126756 4634 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_80d6ffb7-ae89-453f-8694-074a86517297/swift-recon-cron/0.log" Sep 29 15:07:10 crc kubenswrapper[4634]: I0929 15:07:10.280851 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-f78vg_8e05d615-586f-430c-a8c9-f871a04f31d2/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:10 crc kubenswrapper[4634]: I0929 15:07:10.449715 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_1753ec8d-9af3-4930-a9d7-88b1c2f440cb/tempest-tests-tempest-tests-runner/0.log" Sep 29 15:07:10 crc kubenswrapper[4634]: I0929 15:07:10.567605 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_8951f1a8-0969-4546-b683-b06ea036112c/test-operator-logs-container/0.log" Sep 29 15:07:10 crc kubenswrapper[4634]: I0929 15:07:10.724564 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-thjn4_572bf9da-bb03-48be-b902-48ea1755346d/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 15:07:14 crc kubenswrapper[4634]: I0929 15:07:14.396133 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 15:07:14 crc kubenswrapper[4634]: I0929 15:07:14.396745 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.287318 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zbd25"] Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.297665 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.304965 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbd25"] Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.424490 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-utilities\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.424574 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-catalog-content\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.424954 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwc7n\" (UniqueName: \"kubernetes.io/projected/bb77db33-ffdf-43b5-99db-543d6f57f21d-kube-api-access-kwc7n\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.527195 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-utilities\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.527247 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-catalog-content\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.527329 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwc7n\" (UniqueName: \"kubernetes.io/projected/bb77db33-ffdf-43b5-99db-543d6f57f21d-kube-api-access-kwc7n\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.528218 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-utilities\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.528436 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-catalog-content\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.557352 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kwc7n\" (UniqueName: \"kubernetes.io/projected/bb77db33-ffdf-43b5-99db-543d6f57f21d-kube-api-access-kwc7n\") pod \"certified-operators-zbd25\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:25 crc kubenswrapper[4634]: I0929 15:07:25.631511 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:26 crc kubenswrapper[4634]: I0929 15:07:26.230580 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbd25"] Sep 29 15:07:26 crc kubenswrapper[4634]: I0929 15:07:26.536146 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbd25" event={"ID":"bb77db33-ffdf-43b5-99db-543d6f57f21d","Type":"ContainerStarted","Data":"1b4665fb8f4b5f578cf5c21c275e036f0c0c1e96a031fd1466558577b9d5cd4b"} Sep 29 15:07:27 crc kubenswrapper[4634]: I0929 15:07:27.569113 4634 generic.go:334] "Generic (PLEG): container finished" podID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerID="d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649" exitCode=0 Sep 29 15:07:27 crc kubenswrapper[4634]: I0929 15:07:27.569164 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbd25" event={"ID":"bb77db33-ffdf-43b5-99db-543d6f57f21d","Type":"ContainerDied","Data":"d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649"} Sep 29 15:07:27 crc kubenswrapper[4634]: I0929 15:07:27.574318 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 15:07:28 crc kubenswrapper[4634]: I0929 15:07:28.578633 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbd25" event={"ID":"bb77db33-ffdf-43b5-99db-543d6f57f21d","Type":"ContainerStarted","Data":"0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36"} Sep 29 15:07:30 crc kubenswrapper[4634]: I0929 15:07:30.607645 4634 generic.go:334] "Generic (PLEG): container finished" podID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerID="0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36" exitCode=0 Sep 29 15:07:30 crc kubenswrapper[4634]: I0929 15:07:30.607761 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbd25" event={"ID":"bb77db33-ffdf-43b5-99db-543d6f57f21d","Type":"ContainerDied","Data":"0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36"} Sep 29 15:07:31 crc kubenswrapper[4634]: I0929 15:07:31.619697 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbd25" event={"ID":"bb77db33-ffdf-43b5-99db-543d6f57f21d","Type":"ContainerStarted","Data":"674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934"} Sep 29 15:07:35 crc kubenswrapper[4634]: I0929 15:07:35.632753 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:35 crc kubenswrapper[4634]: I0929 15:07:35.633693 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:35 crc kubenswrapper[4634]: I0929 15:07:35.919173 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:35 crc 
kubenswrapper[4634]: I0929 15:07:35.952963 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zbd25" podStartSLOduration=7.428005712 podStartE2EDuration="10.952941186s" podCreationTimestamp="2025-09-29 15:07:25 +0000 UTC" firstStartedPulling="2025-09-29 15:07:27.574007472 +0000 UTC m=+4978.142735221" lastFinishedPulling="2025-09-29 15:07:31.098942946 +0000 UTC m=+4981.667670695" observedRunningTime="2025-09-29 15:07:31.641779726 +0000 UTC m=+4982.210507475" watchObservedRunningTime="2025-09-29 15:07:35.952941186 +0000 UTC m=+4986.521668935" Sep 29 15:07:40 crc kubenswrapper[4634]: I0929 15:07:40.725121 4634 generic.go:334] "Generic (PLEG): container finished" podID="dde4a4e4-6024-4741-b9c0-4b8b85d51e5b" containerID="b63fa377934a07b03ba96a280f33faca2c47ad403de5b8df4139bc3c1c81f5e1" exitCode=0 Sep 29 15:07:40 crc kubenswrapper[4634]: I0929 15:07:40.725223 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" event={"ID":"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b","Type":"ContainerDied","Data":"b63fa377934a07b03ba96a280f33faca2c47ad403de5b8df4139bc3c1c81f5e1"} Sep 29 15:07:41 crc kubenswrapper[4634]: I0929 15:07:41.893119 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:07:41 crc kubenswrapper[4634]: I0929 15:07:41.937068 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xxphc/crc-debug-sw2h2"] Sep 29 15:07:41 crc kubenswrapper[4634]: I0929 15:07:41.946283 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xxphc/crc-debug-sw2h2"] Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.053269 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nc5cf\" (UniqueName: \"kubernetes.io/projected/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-kube-api-access-nc5cf\") pod \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.053405 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-host\") pod \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\" (UID: \"dde4a4e4-6024-4741-b9c0-4b8b85d51e5b\") " Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.053540 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-host" (OuterVolumeSpecName: "host") pod "dde4a4e4-6024-4741-b9c0-4b8b85d51e5b" (UID: "dde4a4e4-6024-4741-b9c0-4b8b85d51e5b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.054499 4634 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-host\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.060514 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-kube-api-access-nc5cf" (OuterVolumeSpecName: "kube-api-access-nc5cf") pod "dde4a4e4-6024-4741-b9c0-4b8b85d51e5b" (UID: "dde4a4e4-6024-4741-b9c0-4b8b85d51e5b"). InnerVolumeSpecName "kube-api-access-nc5cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.129342 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dde4a4e4-6024-4741-b9c0-4b8b85d51e5b" path="/var/lib/kubelet/pods/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b/volumes" Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.157412 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nc5cf\" (UniqueName: \"kubernetes.io/projected/dde4a4e4-6024-4741-b9c0-4b8b85d51e5b-kube-api-access-nc5cf\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.750435 4634 scope.go:117] "RemoveContainer" containerID="b63fa377934a07b03ba96a280f33faca2c47ad403de5b8df4139bc3c1c81f5e1" Sep 29 15:07:42 crc kubenswrapper[4634]: I0929 15:07:42.750505 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-sw2h2" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.150147 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xxphc/crc-debug-qws5x"] Sep 29 15:07:43 crc kubenswrapper[4634]: E0929 15:07:43.151806 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dde4a4e4-6024-4741-b9c0-4b8b85d51e5b" containerName="container-00" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.151924 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="dde4a4e4-6024-4741-b9c0-4b8b85d51e5b" containerName="container-00" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.152279 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="dde4a4e4-6024-4741-b9c0-4b8b85d51e5b" containerName="container-00" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.153187 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.279320 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cbabaa02-352b-4d93-84f0-a97e1807140b-host\") pod \"crc-debug-qws5x\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.281207 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ddps\" (UniqueName: \"kubernetes.io/projected/cbabaa02-352b-4d93-84f0-a97e1807140b-kube-api-access-8ddps\") pod \"crc-debug-qws5x\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.383250 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ddps\" (UniqueName: \"kubernetes.io/projected/cbabaa02-352b-4d93-84f0-a97e1807140b-kube-api-access-8ddps\") pod \"crc-debug-qws5x\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.383392 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cbabaa02-352b-4d93-84f0-a97e1807140b-host\") pod \"crc-debug-qws5x\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.383704 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cbabaa02-352b-4d93-84f0-a97e1807140b-host\") pod \"crc-debug-qws5x\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.404406 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ddps\" (UniqueName: \"kubernetes.io/projected/cbabaa02-352b-4d93-84f0-a97e1807140b-kube-api-access-8ddps\") pod \"crc-debug-qws5x\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.483662 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:43 crc kubenswrapper[4634]: I0929 15:07:43.763892 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-qws5x" event={"ID":"cbabaa02-352b-4d93-84f0-a97e1807140b","Type":"ContainerStarted","Data":"8bb85dc1ab64dec20a3f89c6964a37d614657f79358704b9016d337a8c484b9d"} Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.395588 4634 patch_prober.go:28] interesting pod/machine-config-daemon-k9jf4 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.395652 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.395704 4634 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.396499 4634 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"} pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.396575 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" containerName="machine-config-daemon" containerID="cri-o://a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" gracePeriod=600 Sep 29 15:07:44 crc kubenswrapper[4634]: E0929 15:07:44.540444 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.781844 4634 generic.go:334] "Generic (PLEG): container finished" podID="9173d45a-da12-4090-92c3-65ad4dcec715" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" exitCode=0 Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.781963 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerDied","Data":"a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"} Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.782056 4634 scope.go:117] "RemoveContainer" containerID="9f7cdaf4ee5f526676ee19fe2c57bacdee76388e7e71e9fb969e080ae3995dee" Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.783380 4634 scope.go:117] "RemoveContainer" 
containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:07:44 crc kubenswrapper[4634]: E0929 15:07:44.784017 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.786812 4634 generic.go:334] "Generic (PLEG): container finished" podID="cbabaa02-352b-4d93-84f0-a97e1807140b" containerID="e53f474ceb78e929d351d8499895b51439d79632b088f70c0a15d9d7db94e159" exitCode=0 Sep 29 15:07:44 crc kubenswrapper[4634]: I0929 15:07:44.786892 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-qws5x" event={"ID":"cbabaa02-352b-4d93-84f0-a97e1807140b","Type":"ContainerDied","Data":"e53f474ceb78e929d351d8499895b51439d79632b088f70c0a15d9d7db94e159"} Sep 29 15:07:45 crc kubenswrapper[4634]: I0929 15:07:45.684599 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:45 crc kubenswrapper[4634]: I0929 15:07:45.745132 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbd25"] Sep 29 15:07:45 crc kubenswrapper[4634]: I0929 15:07:45.798407 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zbd25" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="registry-server" containerID="cri-o://674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934" gracePeriod=2 Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.033997 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.071603 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ddps\" (UniqueName: \"kubernetes.io/projected/cbabaa02-352b-4d93-84f0-a97e1807140b-kube-api-access-8ddps\") pod \"cbabaa02-352b-4d93-84f0-a97e1807140b\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.071684 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cbabaa02-352b-4d93-84f0-a97e1807140b-host\") pod \"cbabaa02-352b-4d93-84f0-a97e1807140b\" (UID: \"cbabaa02-352b-4d93-84f0-a97e1807140b\") " Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.072477 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cbabaa02-352b-4d93-84f0-a97e1807140b-host" (OuterVolumeSpecName: "host") pod "cbabaa02-352b-4d93-84f0-a97e1807140b" (UID: "cbabaa02-352b-4d93-84f0-a97e1807140b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.085612 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbabaa02-352b-4d93-84f0-a97e1807140b-kube-api-access-8ddps" (OuterVolumeSpecName: "kube-api-access-8ddps") pod "cbabaa02-352b-4d93-84f0-a97e1807140b" (UID: "cbabaa02-352b-4d93-84f0-a97e1807140b"). 
InnerVolumeSpecName "kube-api-access-8ddps". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.174194 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ddps\" (UniqueName: \"kubernetes.io/projected/cbabaa02-352b-4d93-84f0-a97e1807140b-kube-api-access-8ddps\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.174580 4634 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cbabaa02-352b-4d93-84f0-a97e1807140b-host\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.279494 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.377423 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwc7n\" (UniqueName: \"kubernetes.io/projected/bb77db33-ffdf-43b5-99db-543d6f57f21d-kube-api-access-kwc7n\") pod \"bb77db33-ffdf-43b5-99db-543d6f57f21d\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.377536 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-utilities\") pod \"bb77db33-ffdf-43b5-99db-543d6f57f21d\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.377693 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-catalog-content\") pod \"bb77db33-ffdf-43b5-99db-543d6f57f21d\" (UID: \"bb77db33-ffdf-43b5-99db-543d6f57f21d\") " Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.380161 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-utilities" (OuterVolumeSpecName: "utilities") pod "bb77db33-ffdf-43b5-99db-543d6f57f21d" (UID: "bb77db33-ffdf-43b5-99db-543d6f57f21d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.393058 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb77db33-ffdf-43b5-99db-543d6f57f21d-kube-api-access-kwc7n" (OuterVolumeSpecName: "kube-api-access-kwc7n") pod "bb77db33-ffdf-43b5-99db-543d6f57f21d" (UID: "bb77db33-ffdf-43b5-99db-543d6f57f21d"). InnerVolumeSpecName "kube-api-access-kwc7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.428717 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb77db33-ffdf-43b5-99db-543d6f57f21d" (UID: "bb77db33-ffdf-43b5-99db-543d6f57f21d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.481430 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwc7n\" (UniqueName: \"kubernetes.io/projected/bb77db33-ffdf-43b5-99db-543d6f57f21d-kube-api-access-kwc7n\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.481465 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.481475 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb77db33-ffdf-43b5-99db-543d6f57f21d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.814722 4634 generic.go:334] "Generic (PLEG): container finished" podID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerID="674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934" exitCode=0 Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.814811 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbd25" event={"ID":"bb77db33-ffdf-43b5-99db-543d6f57f21d","Type":"ContainerDied","Data":"674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934"} Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.814850 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbd25" event={"ID":"bb77db33-ffdf-43b5-99db-543d6f57f21d","Type":"ContainerDied","Data":"1b4665fb8f4b5f578cf5c21c275e036f0c0c1e96a031fd1466558577b9d5cd4b"} Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.814874 4634 scope.go:117] "RemoveContainer" containerID="674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.815039 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbd25" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.820348 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-qws5x" event={"ID":"cbabaa02-352b-4d93-84f0-a97e1807140b","Type":"ContainerDied","Data":"8bb85dc1ab64dec20a3f89c6964a37d614657f79358704b9016d337a8c484b9d"} Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.820398 4634 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8bb85dc1ab64dec20a3f89c6964a37d614657f79358704b9016d337a8c484b9d" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.820467 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qws5x" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.883942 4634 scope.go:117] "RemoveContainer" containerID="0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.904074 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbd25"] Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.913804 4634 scope.go:117] "RemoveContainer" containerID="d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.930335 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zbd25"] Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.960588 4634 scope.go:117] "RemoveContainer" containerID="674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934" Sep 29 15:07:46 crc kubenswrapper[4634]: E0929 15:07:46.961024 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934\": container with ID starting with 674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934 not found: ID does not exist" containerID="674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.961067 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934"} err="failed to get container status \"674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934\": rpc error: code = NotFound desc = could not find container \"674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934\": container with ID starting with 674594e19cf6f606a20c4d3e79bdd39a12021ab62425add9a70efed8a0dc3934 not found: ID does not exist" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.961113 4634 scope.go:117] "RemoveContainer" containerID="0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36" Sep 29 15:07:46 crc kubenswrapper[4634]: E0929 15:07:46.961422 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36\": container with ID starting with 0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36 not found: ID does not exist" containerID="0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.961468 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36"} err="failed to get container status \"0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36\": rpc error: code = NotFound desc = could not find container \"0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36\": container with ID starting with 0d42dd307b598077b6f1a9e5af2d2f4ff96e97bfe95ecad138a81401504e0c36 not found: ID does not exist" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.961490 4634 scope.go:117] "RemoveContainer" containerID="d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649" Sep 29 15:07:46 crc kubenswrapper[4634]: E0929 15:07:46.961760 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649\": container with ID starting with d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649 not found: ID does not exist" containerID="d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649" Sep 29 15:07:46 crc kubenswrapper[4634]: I0929 15:07:46.961786 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649"} err="failed to get container status \"d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649\": rpc error: code = NotFound desc = could not find container \"d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649\": container with ID starting with d461c8c0d6c1a2175e7828b668b28755f8d8a19c1516b3f2b945728f36215649 not found: ID does not exist" Sep 29 15:07:48 crc kubenswrapper[4634]: I0929 15:07:48.127317 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" path="/var/lib/kubelet/pods/bb77db33-ffdf-43b5-99db-543d6f57f21d/volumes" Sep 29 15:07:53 crc kubenswrapper[4634]: I0929 15:07:53.509212 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xxphc/crc-debug-qws5x"] Sep 29 15:07:53 crc kubenswrapper[4634]: I0929 15:07:53.517161 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xxphc/crc-debug-qws5x"] Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.125265 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbabaa02-352b-4d93-84f0-a97e1807140b" path="/var/lib/kubelet/pods/cbabaa02-352b-4d93-84f0-a97e1807140b/volumes" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.738736 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xxphc/crc-debug-qg4gp"] Sep 29 15:07:54 crc kubenswrapper[4634]: E0929 15:07:54.739263 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="registry-server" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.739280 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="registry-server" Sep 29 15:07:54 crc kubenswrapper[4634]: E0929 15:07:54.739309 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbabaa02-352b-4d93-84f0-a97e1807140b" containerName="container-00" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.739316 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbabaa02-352b-4d93-84f0-a97e1807140b" containerName="container-00" Sep 29 15:07:54 crc kubenswrapper[4634]: E0929 15:07:54.739339 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="extract-content" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.739347 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="extract-content" Sep 29 15:07:54 crc kubenswrapper[4634]: E0929 15:07:54.739360 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="extract-utilities" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.739368 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="extract-utilities" Sep 29 15:07:54 crc 
kubenswrapper[4634]: I0929 15:07:54.739605 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbabaa02-352b-4d93-84f0-a97e1807140b" containerName="container-00" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.739648 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb77db33-ffdf-43b5-99db-543d6f57f21d" containerName="registry-server" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.740481 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.839645 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/12ef7528-8bdc-4144-91b1-dd5e1783d014-host\") pod \"crc-debug-qg4gp\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.840511 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5dg4\" (UniqueName: \"kubernetes.io/projected/12ef7528-8bdc-4144-91b1-dd5e1783d014-kube-api-access-l5dg4\") pod \"crc-debug-qg4gp\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.942505 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5dg4\" (UniqueName: \"kubernetes.io/projected/12ef7528-8bdc-4144-91b1-dd5e1783d014-kube-api-access-l5dg4\") pod \"crc-debug-qg4gp\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.942854 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/12ef7528-8bdc-4144-91b1-dd5e1783d014-host\") pod \"crc-debug-qg4gp\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.942942 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/12ef7528-8bdc-4144-91b1-dd5e1783d014-host\") pod \"crc-debug-qg4gp\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:54 crc kubenswrapper[4634]: I0929 15:07:54.979529 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5dg4\" (UniqueName: \"kubernetes.io/projected/12ef7528-8bdc-4144-91b1-dd5e1783d014-kube-api-access-l5dg4\") pod \"crc-debug-qg4gp\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:55 crc kubenswrapper[4634]: I0929 15:07:55.065975 4634 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:55 crc kubenswrapper[4634]: I0929 15:07:55.918445 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-qg4gp" event={"ID":"12ef7528-8bdc-4144-91b1-dd5e1783d014","Type":"ContainerDied","Data":"1e542dadde9606886cbb4ed8cfed26867cb91d8baa0128ea094fd4983328f3ec"} Sep 29 15:07:55 crc kubenswrapper[4634]: I0929 15:07:55.918391 4634 generic.go:334] "Generic (PLEG): container finished" podID="12ef7528-8bdc-4144-91b1-dd5e1783d014" containerID="1e542dadde9606886cbb4ed8cfed26867cb91d8baa0128ea094fd4983328f3ec" exitCode=0 Sep 29 15:07:55 crc kubenswrapper[4634]: I0929 15:07:55.919294 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/crc-debug-qg4gp" event={"ID":"12ef7528-8bdc-4144-91b1-dd5e1783d014","Type":"ContainerStarted","Data":"24a2d296ed1ca6ff630a3605d532ab383747f4957a8ac115c5ffad2b4800f706"} Sep 29 15:07:55 crc kubenswrapper[4634]: I0929 15:07:55.992074 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xxphc/crc-debug-qg4gp"] Sep 29 15:07:56 crc kubenswrapper[4634]: I0929 15:07:56.011570 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xxphc/crc-debug-qg4gp"] Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.027895 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qg4gp" Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.083852 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/12ef7528-8bdc-4144-91b1-dd5e1783d014-host\") pod \"12ef7528-8bdc-4144-91b1-dd5e1783d014\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.083987 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/12ef7528-8bdc-4144-91b1-dd5e1783d014-host" (OuterVolumeSpecName: "host") pod "12ef7528-8bdc-4144-91b1-dd5e1783d014" (UID: "12ef7528-8bdc-4144-91b1-dd5e1783d014"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.084586 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5dg4\" (UniqueName: \"kubernetes.io/projected/12ef7528-8bdc-4144-91b1-dd5e1783d014-kube-api-access-l5dg4\") pod \"12ef7528-8bdc-4144-91b1-dd5e1783d014\" (UID: \"12ef7528-8bdc-4144-91b1-dd5e1783d014\") " Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.084948 4634 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/12ef7528-8bdc-4144-91b1-dd5e1783d014-host\") on node \"crc\" DevicePath \"\"" Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.098650 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12ef7528-8bdc-4144-91b1-dd5e1783d014-kube-api-access-l5dg4" (OuterVolumeSpecName: "kube-api-access-l5dg4") pod "12ef7528-8bdc-4144-91b1-dd5e1783d014" (UID: "12ef7528-8bdc-4144-91b1-dd5e1783d014"). InnerVolumeSpecName "kube-api-access-l5dg4". 
PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.187596 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5dg4\" (UniqueName: \"kubernetes.io/projected/12ef7528-8bdc-4144-91b1-dd5e1783d014-kube-api-access-l5dg4\") on node \"crc\" DevicePath \"\""
Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.846966 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/util/0.log"
Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.937762 4634 scope.go:117] "RemoveContainer" containerID="1e542dadde9606886cbb4ed8cfed26867cb91d8baa0128ea094fd4983328f3ec"
Sep 29 15:07:57 crc kubenswrapper[4634]: I0929 15:07:57.937858 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/crc-debug-qg4gp"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.103888 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/pull/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.109970 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:07:58 crc kubenswrapper[4634]: E0929 15:07:58.110377 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.127424 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12ef7528-8bdc-4144-91b1-dd5e1783d014" path="/var/lib/kubelet/pods/12ef7528-8bdc-4144-91b1-dd5e1783d014/volumes"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.144338 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/pull/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.169790 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/util/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.408888 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/pull/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.423790 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/extract/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.480069 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58d0cb90600d6f54dfce61daa1eb68b1b8b324630084fe4b525dc1c87chklf9_bc9a6824-e9e0-4847-96a8-cbc4eccce6de/util/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.634962 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6495d75b5-2nbgf_7fb4797f-f58b-425a-a987-4559c9d5d481/kube-rbac-proxy/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.797272 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6495d75b5-2nbgf_7fb4797f-f58b-425a-a987-4559c9d5d481/manager/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.852359 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748c574d75-h2wqv_d82e90ad-ac20-415a-9b7e-168e6472f2a8/kube-rbac-proxy/0.log"
Sep 29 15:07:58 crc kubenswrapper[4634]: I0929 15:07:58.947030 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-748c574d75-h2wqv_d82e90ad-ac20-415a-9b7e-168e6472f2a8/manager/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.066782 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d74f4d695-w2nj4_fc9290c5-62eb-4b93-8b0f-032c2474510f/kube-rbac-proxy/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.167837 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-7d74f4d695-w2nj4_fc9290c5-62eb-4b93-8b0f-032c2474510f/manager/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.249041 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67b5d44b7f-fzzjz_12b1701c-523e-428c-817b-f0ae4914b9fb/kube-rbac-proxy/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.392925 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-67b5d44b7f-fzzjz_12b1701c-523e-428c-817b-f0ae4914b9fb/manager/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.465409 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-8ff95898-b7s9w_8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15/kube-rbac-proxy/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.534109 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-8ff95898-b7s9w_8cfb0dd5-7da2-4d5f-9aa4-15b30f0adc15/manager/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.724849 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-695847bc78-g6ncd_ba50e2d0-3018-4591-81fd-9e31c5d39951/manager/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.759059 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-695847bc78-g6ncd_ba50e2d0-3018-4591-81fd-9e31c5d39951/kube-rbac-proxy/0.log"
Sep 29 15:07:59 crc kubenswrapper[4634]: I0929 15:07:59.956341 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-858cd69f49-7v24f_e220a6dd-ab23-4eeb-9cb7-8496c72cc19f/kube-rbac-proxy/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.152609 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-858cd69f49-7v24f_e220a6dd-ab23-4eeb-9cb7-8496c72cc19f/manager/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.247048 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9fc8d5567-c68h7_c00a2f33-36be-4039-a5ae-73df39f84d1d/kube-rbac-proxy/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.359060 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9fc8d5567-c68h7_c00a2f33-36be-4039-a5ae-73df39f84d1d/manager/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.395210 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7bf498966c-9l547_3fce3aee-b45a-4d80-a2e5-529632ed8a2d/kube-rbac-proxy/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.578025 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7bf498966c-9l547_3fce3aee-b45a-4d80-a2e5-529632ed8a2d/manager/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.722076 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-56cf9c6b99-bd4mn_ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df/kube-rbac-proxy/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.781143 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-56cf9c6b99-bd4mn_ecc68bcd-c256-4f46-9cc8-aaeab7c7b4df/manager/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.947980 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-687b9cf756-hd2mv_0b0b3b6f-0579-4a42-bad2-ecbda8906426/kube-rbac-proxy/0.log"
Sep 29 15:08:00 crc kubenswrapper[4634]: I0929 15:08:00.973332 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-687b9cf756-hd2mv_0b0b3b6f-0579-4a42-bad2-ecbda8906426/manager/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.127348 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54d766c9f9-k2dqf_be9fbcb2-15d0-4fc2-b745-41178d406fca/kube-rbac-proxy/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.334016 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54d766c9f9-k2dqf_be9fbcb2-15d0-4fc2-b745-41178d406fca/manager/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.409323 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-9z55w_11b77d0f-14f2-47d2-839a-6e06505787a2/kube-rbac-proxy/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.603803 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-9z55w_11b77d0f-14f2-47d2-839a-6e06505787a2/manager/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.664293 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-7c4mq_65cef236-09ce-4623-9cd8-9d4c0e1f8346/kube-rbac-proxy/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.739531 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-7c4mq_65cef236-09ce-4623-9cd8-9d4c0e1f8346/manager/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.844913 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-rq5pg_e6c834dc-3418-4d52-ade3-02c1043d6360/kube-rbac-proxy/0.log"
Sep 29 15:08:01 crc kubenswrapper[4634]: I0929 15:08:01.944679 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-rq5pg_e6c834dc-3418-4d52-ade3-02c1043d6360/manager/0.log"
Sep 29 15:08:02 crc kubenswrapper[4634]: I0929 15:08:02.199409 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667746d855-fd8px_63fb32c1-31c4-4ab0-b10e-c467e2c74410/kube-rbac-proxy/0.log"
Sep 29 15:08:02 crc kubenswrapper[4634]: I0929 15:08:02.455219 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fc7b59957-prtng_aea3000a-d973-4f2b-a521-dd3313901830/kube-rbac-proxy/0.log"
Sep 29 15:08:02 crc kubenswrapper[4634]: I0929 15:08:02.694210 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-nzrgv_90d0c015-fc7c-4d00-b1a2-83a4e0d68ada/registry-server/0.log"
Sep 29 15:08:02 crc kubenswrapper[4634]: I0929 15:08:02.722690 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-fc7b59957-prtng_aea3000a-d973-4f2b-a521-dd3313901830/operator/0.log"
Sep 29 15:08:02 crc kubenswrapper[4634]: I0929 15:08:02.863134 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5f95c46c78-2vb6t_0808341c-4037-4360-bc34-dce11a7e8088/kube-rbac-proxy/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.040992 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5f95c46c78-2vb6t_0808341c-4037-4360-bc34-dce11a7e8088/manager/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.077666 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-774b97b48-66mbm_e777128b-ae24-469f-81bb-adf78608f20e/kube-rbac-proxy/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.226609 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-774b97b48-66mbm_e777128b-ae24-469f-81bb-adf78608f20e/manager/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.328457 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-cvk6x_c5c15e4b-b806-4d39-915f-c6e60e6d72ea/operator/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.469843 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-667746d855-fd8px_63fb32c1-31c4-4ab0-b10e-c467e2c74410/manager/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.622935 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-6gkh8_b108e534-0a60-4d24-a6b3-9b967045469a/kube-rbac-proxy/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.636397 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-6gkh8_b108e534-0a60-4d24-a6b3-9b967045469a/manager/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.724924 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5bf96cfbc4-2dp75_97de340e-634f-47e2-8a37-800f2261e43b/kube-rbac-proxy/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.849887 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5bf96cfbc4-2dp75_97de340e-634f-47e2-8a37-800f2261e43b/manager/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.854361 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-knb5c_6cb280b6-d86c-42cb-8887-819b38c304b8/kube-rbac-proxy/0.log"
Sep 29 15:08:03 crc kubenswrapper[4634]: I0929 15:08:03.930947 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-knb5c_6cb280b6-d86c-42cb-8887-819b38c304b8/manager/0.log"
Sep 29 15:08:04 crc kubenswrapper[4634]: I0929 15:08:04.069767 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-7w4vx_45b61a8e-44b5-4cca-85b6-344738b51f52/kube-rbac-proxy/0.log"
Sep 29 15:08:04 crc kubenswrapper[4634]: I0929 15:08:04.099994 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-7w4vx_45b61a8e-44b5-4cca-85b6-344738b51f52/manager/0.log"
Sep 29 15:08:11 crc kubenswrapper[4634]: I0929 15:08:11.110242 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:08:11 crc kubenswrapper[4634]: E0929 15:08:11.111275 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:08:21 crc kubenswrapper[4634]: I0929 15:08:21.495685 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-jq747_0c78cbc4-e705-490d-b453-9b1ec8a4ca07/control-plane-machine-set-operator/0.log"
Sep 29 15:08:21 crc kubenswrapper[4634]: I0929 15:08:21.628042 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-27cm7_b02e5190-b670-4ec4-824f-a4f18cf79e33/kube-rbac-proxy/0.log"
Sep 29 15:08:21 crc kubenswrapper[4634]: I0929 15:08:21.729888 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-27cm7_b02e5190-b670-4ec4-824f-a4f18cf79e33/machine-api-operator/0.log"
Sep 29 15:08:23 crc kubenswrapper[4634]: I0929 15:08:23.110887 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:08:23 crc kubenswrapper[4634]: E0929 15:08:23.111233 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:08:36 crc kubenswrapper[4634]: I0929 15:08:36.110283 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:08:36 crc kubenswrapper[4634]: E0929 15:08:36.111550 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:08:37 crc kubenswrapper[4634]: I0929 15:08:37.891235 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-2vbph_d2d8a3b4-5469-4e43-853a-68ea314698d5/cert-manager-controller/0.log"
Sep 29 15:08:38 crc kubenswrapper[4634]: I0929 15:08:38.109863 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-fssvx_f7a64d90-3df0-4013-9334-10cb44b056d0/cert-manager-cainjector/0.log"
Sep 29 15:08:38 crc kubenswrapper[4634]: I0929 15:08:38.166105 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-whcmj_952ce650-52ed-4dcb-88bb-d9f9ce5a69ed/cert-manager-webhook/0.log"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.006816 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxrr"]
Sep 29 15:08:44 crc kubenswrapper[4634]: E0929 15:08:44.010555 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ef7528-8bdc-4144-91b1-dd5e1783d014" containerName="container-00"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.010678 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ef7528-8bdc-4144-91b1-dd5e1783d014" containerName="container-00"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.011295 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="12ef7528-8bdc-4144-91b1-dd5e1783d014" containerName="container-00"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.019260 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.067719 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxrr"]
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.074746 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-utilities\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.081817 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-catalog-content\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.081937 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7rfr\" (UniqueName: \"kubernetes.io/projected/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-kube-api-access-n7rfr\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.184693 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7rfr\" (UniqueName: \"kubernetes.io/projected/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-kube-api-access-n7rfr\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.184801 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-utilities\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.184895 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-catalog-content\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.185596 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-utilities\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.185695 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-catalog-content\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.213562 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7rfr\" (UniqueName: \"kubernetes.io/projected/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-kube-api-access-n7rfr\") pod \"redhat-marketplace-nbxrr\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") " pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:44 crc kubenswrapper[4634]: I0929 15:08:44.409219 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:08:45 crc kubenswrapper[4634]: I0929 15:08:45.645097 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxrr"]
Sep 29 15:08:46 crc kubenswrapper[4634]: I0929 15:08:46.494849 4634 generic.go:334] "Generic (PLEG): container finished" podID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerID="485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900" exitCode=0
Sep 29 15:08:46 crc kubenswrapper[4634]: I0929 15:08:46.494907 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxrr" event={"ID":"53c1c4df-ef5f-4970-8bc1-99c3d8080f46","Type":"ContainerDied","Data":"485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900"}
Sep 29 15:08:46 crc kubenswrapper[4634]: I0929 15:08:46.495508 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxrr" event={"ID":"53c1c4df-ef5f-4970-8bc1-99c3d8080f46","Type":"ContainerStarted","Data":"bb08405106d0e441f33d1b10686dc17d32fbdb69070ace2e085662bee2879f71"}
Sep 29 15:08:48 crc kubenswrapper[4634]: I0929 15:08:48.530068 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxrr" event={"ID":"53c1c4df-ef5f-4970-8bc1-99c3d8080f46","Type":"ContainerStarted","Data":"8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7"}
Sep 29 15:08:50 crc kubenswrapper[4634]: I0929 15:08:50.552667 4634 generic.go:334] "Generic (PLEG): container finished" podID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerID="8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7" exitCode=0
Sep 29 15:08:50 crc kubenswrapper[4634]: I0929 15:08:50.552748 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxrr" event={"ID":"53c1c4df-ef5f-4970-8bc1-99c3d8080f46","Type":"ContainerDied","Data":"8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7"}
Sep 29 15:08:51 crc kubenswrapper[4634]: I0929 15:08:51.111016 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:08:51 crc kubenswrapper[4634]: E0929 15:08:51.111875 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:08:53 crc kubenswrapper[4634]: I0929 15:08:53.584680 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxrr" event={"ID":"53c1c4df-ef5f-4970-8bc1-99c3d8080f46","Type":"ContainerStarted","Data":"0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28"}
Sep 29 15:08:54 crc kubenswrapper[4634]: I0929 15:08:54.649734 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nbxrr" podStartSLOduration=4.92924701 podStartE2EDuration="11.649709956s" podCreationTimestamp="2025-09-29 15:08:43 +0000 UTC" firstStartedPulling="2025-09-29 15:08:46.497078275 +0000 UTC m=+5057.065806064" lastFinishedPulling="2025-09-29 15:08:53.217541261 +0000 UTC m=+5063.786269010" observedRunningTime="2025-09-29 15:08:54.638411363 +0000 UTC m=+5065.207139112" watchObservedRunningTime="2025-09-29 15:08:54.649709956 +0000 UTC m=+5065.218437705"
Sep 29 15:08:56 crc kubenswrapper[4634]: I0929 15:08:56.071722 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-qz252_92541952-adc1-4f55-a7c7-14d68fd9df0d/nmstate-console-plugin/0.log"
Sep 29 15:08:56 crc kubenswrapper[4634]: I0929 15:08:56.356454 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-przz9_915d1f27-b652-4527-9df6-c1a1ee347d9d/nmstate-handler/0.log"
Sep 29 15:08:56 crc kubenswrapper[4634]: I0929 15:08:56.449461 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-6h8nl_b4bdb338-a719-4d83-a12a-f0b18a589d65/kube-rbac-proxy/0.log"
Sep 29 15:08:56 crc kubenswrapper[4634]: I0929 15:08:56.491006 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-6h8nl_b4bdb338-a719-4d83-a12a-f0b18a589d65/nmstate-metrics/0.log"
Sep 29 15:08:56 crc kubenswrapper[4634]: I0929 15:08:56.688459 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-qnlfb_805b844b-fda9-431c-a652-d9c6211769f3/nmstate-operator/0.log"
Sep 29 15:08:56 crc kubenswrapper[4634]: I0929 15:08:56.707544 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-4hp98_a10543ec-dd79-4bc4-9330-ecca62f0dcde/nmstate-webhook/0.log"
Sep 29 15:09:04 crc kubenswrapper[4634]: I0929 15:09:04.409769 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:09:04 crc kubenswrapper[4634]: I0929 15:09:04.410404 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:09:04 crc kubenswrapper[4634]: I0929 15:09:04.466374 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:09:04 crc kubenswrapper[4634]: I0929 15:09:04.784970 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:09:04 crc kubenswrapper[4634]: I0929 15:09:04.849116 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxrr"]
Sep 29 15:09:05 crc kubenswrapper[4634]: I0929 15:09:05.110338 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:09:05 crc kubenswrapper[4634]: E0929 15:09:05.110599 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:09:06 crc kubenswrapper[4634]: I0929 15:09:06.738445 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nbxrr" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="registry-server" containerID="cri-o://0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28" gracePeriod=2
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.237687 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.388852 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-utilities\") pod \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") "
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.389049 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7rfr\" (UniqueName: \"kubernetes.io/projected/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-kube-api-access-n7rfr\") pod \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") "
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.389110 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-catalog-content\") pod \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\" (UID: \"53c1c4df-ef5f-4970-8bc1-99c3d8080f46\") "
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.389445 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-utilities" (OuterVolumeSpecName: "utilities") pod "53c1c4df-ef5f-4970-8bc1-99c3d8080f46" (UID: "53c1c4df-ef5f-4970-8bc1-99c3d8080f46"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.390426 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.394421 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-kube-api-access-n7rfr" (OuterVolumeSpecName: "kube-api-access-n7rfr") pod "53c1c4df-ef5f-4970-8bc1-99c3d8080f46" (UID: "53c1c4df-ef5f-4970-8bc1-99c3d8080f46"). InnerVolumeSpecName "kube-api-access-n7rfr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.402629 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "53c1c4df-ef5f-4970-8bc1-99c3d8080f46" (UID: "53c1c4df-ef5f-4970-8bc1-99c3d8080f46"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.492369 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7rfr\" (UniqueName: \"kubernetes.io/projected/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-kube-api-access-n7rfr\") on node \"crc\" DevicePath \"\""
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.492404 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/53c1c4df-ef5f-4970-8bc1-99c3d8080f46-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.750510 4634 generic.go:334] "Generic (PLEG): container finished" podID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerID="0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28" exitCode=0
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.750563 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxrr" event={"ID":"53c1c4df-ef5f-4970-8bc1-99c3d8080f46","Type":"ContainerDied","Data":"0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28"}
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.750601 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxrr" event={"ID":"53c1c4df-ef5f-4970-8bc1-99c3d8080f46","Type":"ContainerDied","Data":"bb08405106d0e441f33d1b10686dc17d32fbdb69070ace2e085662bee2879f71"}
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.750625 4634 scope.go:117] "RemoveContainer" containerID="0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.750798 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbxrr"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.783064 4634 scope.go:117] "RemoveContainer" containerID="8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.803618 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxrr"]
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.828371 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxrr"]
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.832531 4634 scope.go:117] "RemoveContainer" containerID="485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.874057 4634 scope.go:117] "RemoveContainer" containerID="0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28"
Sep 29 15:09:07 crc kubenswrapper[4634]: E0929 15:09:07.874703 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28\": container with ID starting with 0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28 not found: ID does not exist" containerID="0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.874736 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28"} err="failed to get container status \"0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28\": rpc error: code = NotFound desc = could not find container \"0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28\": container with ID starting with 0a296c4191e065957b78c4af187ad973295a83959aea5caf82e3ad46fcb39e28 not found: ID does not exist"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.874760 4634 scope.go:117] "RemoveContainer" containerID="8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7"
Sep 29 15:09:07 crc kubenswrapper[4634]: E0929 15:09:07.875404 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7\": container with ID starting with 8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7 not found: ID does not exist" containerID="8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.875441 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7"} err="failed to get container status \"8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7\": rpc error: code = NotFound desc = could not find container \"8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7\": container with ID starting with 8ff6e7c0c6ec3d988d09b92ca488e4e5d1530aa238dca754f96b5d98f77552d7 not found: ID does not exist"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.875459 4634 scope.go:117] "RemoveContainer" containerID="485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900"
Sep 29 15:09:07 crc kubenswrapper[4634]: E0929 15:09:07.875963 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900\": container with ID starting with 485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900 not found: ID does not exist" containerID="485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900"
Sep 29 15:09:07 crc kubenswrapper[4634]: I0929 15:09:07.876002 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900"} err="failed to get container status \"485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900\": rpc error: code = NotFound desc = could not find container \"485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900\": container with ID starting with 485b177082f2df24511953a654c926b6aa2b1103196387cad75865aa06fdd900 not found: ID does not exist"
Sep 29 15:09:08 crc kubenswrapper[4634]: I0929 15:09:08.127233 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" path="/var/lib/kubelet/pods/53c1c4df-ef5f-4970-8bc1-99c3d8080f46/volumes"
Sep 29 15:09:14 crc kubenswrapper[4634]: I0929 15:09:14.045458 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-vrscx_799ed420-430a-45c8-99a7-de9125bf452d/controller/0.log"
Sep 29 15:09:14 crc kubenswrapper[4634]: I0929 15:09:14.137194 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-vrscx_799ed420-430a-45c8-99a7-de9125bf452d/kube-rbac-proxy/0.log"
Sep 29 15:09:14 crc kubenswrapper[4634]: I0929 15:09:14.267390 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log"
Sep 29 15:09:14 crc kubenswrapper[4634]: I0929 15:09:14.528075 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log"
Sep 29 15:09:14 crc kubenswrapper[4634]: I0929 15:09:14.571999 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log"
Sep 29 15:09:14 crc kubenswrapper[4634]: I0929 15:09:14.588901 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log"
Sep 29 15:09:14 crc kubenswrapper[4634]: I0929 15:09:14.903184 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.222520 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.230553 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.339675 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.346201 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.567794 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-reloader/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.583698 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-metrics/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.607669 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/cp-frr-files/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.685685 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/controller/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.850145 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/frr-metrics/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.850501 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/kube-rbac-proxy/0.log"
Sep 29 15:09:15 crc kubenswrapper[4634]: I0929 15:09:15.938897 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/kube-rbac-proxy-frr/0.log"
Sep 29 15:09:16 crc kubenswrapper[4634]: I0929 15:09:16.116807 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/reloader/0.log"
Sep 29 15:09:16 crc kubenswrapper[4634]: I0929 15:09:16.369236 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-7772m_c4a4e6c8-9854-47b3-b11e-41a9c78334a8/frr-k8s-webhook-server/0.log"
Sep 29 15:09:16 crc kubenswrapper[4634]: I0929 15:09:16.573762 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-64444f645d-qz74c_a7e55c36-0f57-469f-8419-b9ccb4465010/manager/0.log"
Sep 29 15:09:16 crc kubenswrapper[4634]: I0929 15:09:16.731824 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5ff45f5c66-t6xh9_03429314-d17f-4ffa-9d58-b89748690fec/webhook-server/0.log"
Sep 29 15:09:16 crc kubenswrapper[4634]: I0929 15:09:16.978951 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fbwzx_e5394208-75d4-4a32-98c2-16299c7bf5fa/kube-rbac-proxy/0.log"
Sep 29 15:09:17 crc kubenswrapper[4634]: I0929 15:09:17.061494 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mgm82_f9a02c1e-9d46-46f2-891d-d8b81b95736c/frr/0.log"
Sep 29 15:09:17 crc kubenswrapper[4634]: I0929 15:09:17.110398 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:09:17 crc kubenswrapper[4634]: E0929 15:09:17.110609 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:09:17 crc kubenswrapper[4634]: I0929 15:09:17.391379 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-fbwzx_e5394208-75d4-4a32-98c2-16299c7bf5fa/speaker/0.log"
Sep 29 15:09:32 crc kubenswrapper[4634]: I0929 15:09:32.111436 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:09:32 crc kubenswrapper[4634]: E0929 15:09:32.112382 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:09:33 crc kubenswrapper[4634]: I0929 15:09:33.219926 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/util/0.log"
Sep 29 15:09:33 crc kubenswrapper[4634]: I0929 15:09:33.467695 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/util/0.log"
Sep 29 15:09:33 crc kubenswrapper[4634]: I0929 15:09:33.496249 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/pull/0.log"
Sep 29 15:09:33 crc kubenswrapper[4634]: I0929 15:09:33.930367 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/pull/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.087597 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/extract/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.099215 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/util/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.117892 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcbp4z4_16b384c1-46b2-4b51-bf5c-689bc809e5ec/pull/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.331808 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-utilities/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.542702 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-utilities/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.601430 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-content/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.636196 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-content/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.817254 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-content/0.log"
Sep 29 15:09:34 crc kubenswrapper[4634]: I0929 15:09:34.915426 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/extract-utilities/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.113660 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-w4xqx_0d29b02a-75cf-4064-b063-4705c99544e7/registry-server/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.152147 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-utilities/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.398130 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-utilities/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.412243 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-content/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.465799 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-content/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.687413 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-content/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.777178 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/extract-utilities/0.log"
Sep 29 15:09:35 crc kubenswrapper[4634]: I0929 15:09:35.998338 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/util/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.310308 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/pull/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.372799 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/pull/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.428151 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/util/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.480695 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2nr7c_b28ef713-38f8-4b94-a0fc-bc83b791a6d3/registry-server/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.656639 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/extract/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.686284 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/util/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.706333 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96twzr2_7239db40-db27-43b6-9f27-64800144ed27/pull/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.839042 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-pcm5r_a4a3ef07-d59a-4a9c-afe0-bbef3ad0b7b9/marketplace-operator/0.log"
Sep 29 15:09:36 crc kubenswrapper[4634]: I0929 15:09:36.920144 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-utilities/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.102472 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-content/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.129865 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-content/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.131159 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-utilities/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.324721 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-content/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.336450 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/extract-utilities/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.410367 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-utilities/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.606157 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-qkgkn_1add5ad4-56e7-4d54-aaab-a74664b398ff/registry-server/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.648447 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-content/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.696172 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-content/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.725143 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-utilities/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.883794 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-utilities/0.log"
Sep 29 15:09:37 crc kubenswrapper[4634]: I0929 15:09:37.924431 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/extract-content/0.log"
Sep 29 15:09:38 crc kubenswrapper[4634]: I0929 15:09:38.437035 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-s569s_1c8311b2-d781-4903-961e-33b0b839aeae/registry-server/0.log"
Sep 29 15:09:46 crc kubenswrapper[4634]: I0929 15:09:46.111161 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:09:46 crc kubenswrapper[4634]: E0929 15:09:46.112567 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:09:57 crc kubenswrapper[4634]: I0929 15:09:57.110236 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:09:57 crc kubenswrapper[4634]: E0929 15:09:57.110946 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.589236 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-57cs9"]
Sep 29 15:10:00 crc kubenswrapper[4634]: E0929 15:10:00.590085 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="registry-server"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.590101 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="registry-server"
Sep 29 15:10:00 crc kubenswrapper[4634]: E0929 15:10:00.590130 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="extract-utilities"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.590137 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="extract-utilities"
Sep 29 15:10:00 crc kubenswrapper[4634]: E0929 15:10:00.590161 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="extract-content"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.590166 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="extract-content"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.590355 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="53c1c4df-ef5f-4970-8bc1-99c3d8080f46" containerName="registry-server"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.591767 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.613707 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-57cs9"]
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.673575 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l57fl\" (UniqueName: \"kubernetes.io/projected/080a2f14-c349-4a3d-9ece-fd87672d7c3f-kube-api-access-l57fl\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.673632 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-catalog-content\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.673665 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-utilities\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.775014 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l57fl\" (UniqueName: \"kubernetes.io/projected/080a2f14-c349-4a3d-9ece-fd87672d7c3f-kube-api-access-l57fl\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.775073 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-catalog-content\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.775105 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-utilities\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.775656 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-utilities\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.776183 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-catalog-content\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.807783 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l57fl\" (UniqueName: \"kubernetes.io/projected/080a2f14-c349-4a3d-9ece-fd87672d7c3f-kube-api-access-l57fl\") pod \"redhat-operators-57cs9\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:00 crc kubenswrapper[4634]: I0929 15:10:00.912309 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:01 crc kubenswrapper[4634]: I0929 15:10:01.597333 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-57cs9"]
Sep 29 15:10:02 crc kubenswrapper[4634]: I0929 15:10:02.325892 4634 generic.go:334] "Generic (PLEG): container finished" podID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerID="bf3329124e6ae8726689c95df94911e03b3a4e21a611c0346b5cd9633cd67510" exitCode=0
Sep 29 15:10:02 crc kubenswrapper[4634]: I0929 15:10:02.326458 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57cs9" event={"ID":"080a2f14-c349-4a3d-9ece-fd87672d7c3f","Type":"ContainerDied","Data":"bf3329124e6ae8726689c95df94911e03b3a4e21a611c0346b5cd9633cd67510"}
Sep 29 15:10:02 crc kubenswrapper[4634]: I0929 15:10:02.326508 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57cs9" event={"ID":"080a2f14-c349-4a3d-9ece-fd87672d7c3f","Type":"ContainerStarted","Data":"190f8e2af716119e8a8e9fe703b5da645a744b1c6db4dcff24769a282a197445"}
Sep 29 15:10:04 crc kubenswrapper[4634]: I0929 15:10:04.347154 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57cs9" event={"ID":"080a2f14-c349-4a3d-9ece-fd87672d7c3f","Type":"ContainerStarted","Data":"fb13157a3a4e9733b7dcc0627ebe34ba43b248979c77492007008d993436a2c5"}
Sep 29 15:10:09 crc kubenswrapper[4634]: I0929 15:10:09.396889 4634 generic.go:334] "Generic (PLEG): container finished" podID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerID="fb13157a3a4e9733b7dcc0627ebe34ba43b248979c77492007008d993436a2c5" exitCode=0
Sep 29 15:10:09 crc kubenswrapper[4634]: I0929 15:10:09.397501 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57cs9" event={"ID":"080a2f14-c349-4a3d-9ece-fd87672d7c3f","Type":"ContainerDied","Data":"fb13157a3a4e9733b7dcc0627ebe34ba43b248979c77492007008d993436a2c5"}
Sep 29 15:10:10 crc kubenswrapper[4634]: I0929 15:10:10.117794 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a"
Sep 29 15:10:10 crc kubenswrapper[4634]: E0929 15:10:10.118708 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715"
Sep 29 15:10:10 crc kubenswrapper[4634]: I0929 15:10:10.407467 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57cs9" event={"ID":"080a2f14-c349-4a3d-9ece-fd87672d7c3f","Type":"ContainerStarted","Data":"94d4b371c0cb3500713677ba2f67b8eb8406482e6919c8cd378ad1be411d22b8"}
Sep 29 15:10:10 crc kubenswrapper[4634]: I0929 15:10:10.431903 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-57cs9" podStartSLOduration=2.659365818 podStartE2EDuration="10.431881565s" podCreationTimestamp="2025-09-29 15:10:00 +0000 UTC" firstStartedPulling="2025-09-29 15:10:02.328728002 +0000 UTC m=+5132.897455761" lastFinishedPulling="2025-09-29 15:10:10.101243759 +0000 UTC m=+5140.669971508" observedRunningTime="2025-09-29 15:10:10.424739779 +0000 UTC m=+5140.993467528" watchObservedRunningTime="2025-09-29 15:10:10.431881565 +0000 UTC m=+5141.000609314"
Sep 29 15:10:10 crc kubenswrapper[4634]: I0929 15:10:10.912495 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:10 crc kubenswrapper[4634]: I0929 15:10:10.912881 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:11 crc kubenswrapper[4634]: I0929 15:10:11.963586 4634 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-57cs9" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="registry-server" probeResult="failure" output=<
Sep 29 15:10:11 crc kubenswrapper[4634]: timeout: failed to connect service ":50051" within 1s
Sep 29 15:10:11 crc kubenswrapper[4634]: >
Sep 29 15:10:20 crc kubenswrapper[4634]: I0929 15:10:20.980691 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:21 crc kubenswrapper[4634]: I0929 15:10:21.046809 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-57cs9"
Sep 29 15:10:21 crc kubenswrapper[4634]: I0929 15:10:21.224696 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-57cs9"]
Sep 29 15:10:22 crc kubenswrapper[4634]: I0929 15:10:22.535162 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-57cs9" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="registry-server" containerID="cri-o://94d4b371c0cb3500713677ba2f67b8eb8406482e6919c8cd378ad1be411d22b8" gracePeriod=2
Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.545267 4634 generic.go:334] "Generic (PLEG): container finished" podID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerID="94d4b371c0cb3500713677ba2f67b8eb8406482e6919c8cd378ad1be411d22b8" exitCode=0
Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.545354 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57cs9" event={"ID":"080a2f14-c349-4a3d-9ece-fd87672d7c3f","Type":"ContainerDied","Data":"94d4b371c0cb3500713677ba2f67b8eb8406482e6919c8cd378ad1be411d22b8"}
Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.662484 4634 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-57cs9" Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.682923 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-utilities\") pod \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.683132 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-catalog-content\") pod \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.683156 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l57fl\" (UniqueName: \"kubernetes.io/projected/080a2f14-c349-4a3d-9ece-fd87672d7c3f-kube-api-access-l57fl\") pod \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\" (UID: \"080a2f14-c349-4a3d-9ece-fd87672d7c3f\") " Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.684037 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-utilities" (OuterVolumeSpecName: "utilities") pod "080a2f14-c349-4a3d-9ece-fd87672d7c3f" (UID: "080a2f14-c349-4a3d-9ece-fd87672d7c3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.692081 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/080a2f14-c349-4a3d-9ece-fd87672d7c3f-kube-api-access-l57fl" (OuterVolumeSpecName: "kube-api-access-l57fl") pod "080a2f14-c349-4a3d-9ece-fd87672d7c3f" (UID: "080a2f14-c349-4a3d-9ece-fd87672d7c3f"). InnerVolumeSpecName "kube-api-access-l57fl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.786637 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l57fl\" (UniqueName: \"kubernetes.io/projected/080a2f14-c349-4a3d-9ece-fd87672d7c3f-kube-api-access-l57fl\") on node \"crc\" DevicePath \"\"" Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.786675 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.793556 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "080a2f14-c349-4a3d-9ece-fd87672d7c3f" (UID: "080a2f14-c349-4a3d-9ece-fd87672d7c3f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:10:23 crc kubenswrapper[4634]: I0929 15:10:23.889012 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/080a2f14-c349-4a3d-9ece-fd87672d7c3f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 15:10:24 crc kubenswrapper[4634]: I0929 15:10:24.558434 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-57cs9" event={"ID":"080a2f14-c349-4a3d-9ece-fd87672d7c3f","Type":"ContainerDied","Data":"190f8e2af716119e8a8e9fe703b5da645a744b1c6db4dcff24769a282a197445"} Sep 29 15:10:24 crc kubenswrapper[4634]: I0929 15:10:24.558489 4634 scope.go:117] "RemoveContainer" containerID="94d4b371c0cb3500713677ba2f67b8eb8406482e6919c8cd378ad1be411d22b8" Sep 29 15:10:24 crc kubenswrapper[4634]: I0929 15:10:24.558623 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-57cs9" Sep 29 15:10:24 crc kubenswrapper[4634]: I0929 15:10:24.598256 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-57cs9"] Sep 29 15:10:24 crc kubenswrapper[4634]: I0929 15:10:24.608532 4634 scope.go:117] "RemoveContainer" containerID="fb13157a3a4e9733b7dcc0627ebe34ba43b248979c77492007008d993436a2c5" Sep 29 15:10:24 crc kubenswrapper[4634]: I0929 15:10:24.608959 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-57cs9"] Sep 29 15:10:24 crc kubenswrapper[4634]: I0929 15:10:24.634028 4634 scope.go:117] "RemoveContainer" containerID="bf3329124e6ae8726689c95df94911e03b3a4e21a611c0346b5cd9633cd67510" Sep 29 15:10:25 crc kubenswrapper[4634]: I0929 15:10:25.111024 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:10:25 crc kubenswrapper[4634]: E0929 15:10:25.111500 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:10:26 crc kubenswrapper[4634]: I0929 15:10:26.129851 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" path="/var/lib/kubelet/pods/080a2f14-c349-4a3d-9ece-fd87672d7c3f/volumes" Sep 29 15:10:37 crc kubenswrapper[4634]: I0929 15:10:37.111536 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:10:37 crc kubenswrapper[4634]: E0929 15:10:37.112744 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:10:52 crc kubenswrapper[4634]: I0929 15:10:52.110549 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:10:52 crc kubenswrapper[4634]: E0929 15:10:52.112740 
4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:11:05 crc kubenswrapper[4634]: I0929 15:11:05.110011 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:11:05 crc kubenswrapper[4634]: E0929 15:11:05.110774 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:11:17 crc kubenswrapper[4634]: I0929 15:11:17.111276 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:11:17 crc kubenswrapper[4634]: E0929 15:11:17.112461 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:11:32 crc kubenswrapper[4634]: I0929 15:11:32.110788 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:11:32 crc kubenswrapper[4634]: E0929 15:11:32.111541 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:11:37 crc kubenswrapper[4634]: I0929 15:11:37.390225 4634 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/swift-proxy-847d5655ff-zzkf2" podUID="eb9f537d-9c82-4675-aeaf-c0e4656a1330" containerName="proxy-server" probeResult="failure" output="HTTP probe failed with statuscode: 502" Sep 29 15:11:45 crc kubenswrapper[4634]: I0929 15:11:45.110688 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:11:45 crc kubenswrapper[4634]: E0929 15:11:45.111545 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:11:59 crc kubenswrapper[4634]: I0929 15:11:59.110806 4634 scope.go:117] "RemoveContainer" 
containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:11:59 crc kubenswrapper[4634]: E0929 15:11:59.111959 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:12:12 crc kubenswrapper[4634]: I0929 15:12:12.110634 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:12:12 crc kubenswrapper[4634]: E0929 15:12:12.111563 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:12:27 crc kubenswrapper[4634]: I0929 15:12:27.112952 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:12:27 crc kubenswrapper[4634]: E0929 15:12:27.114211 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:12:34 crc kubenswrapper[4634]: I0929 15:12:34.210471 4634 generic.go:334] "Generic (PLEG): container finished" podID="4472a8eb-a763-4645-92df-233d8d34eb71" containerID="5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99" exitCode=0 Sep 29 15:12:34 crc kubenswrapper[4634]: I0929 15:12:34.210554 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xxphc/must-gather-htqjd" event={"ID":"4472a8eb-a763-4645-92df-233d8d34eb71","Type":"ContainerDied","Data":"5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99"} Sep 29 15:12:34 crc kubenswrapper[4634]: I0929 15:12:34.212049 4634 scope.go:117] "RemoveContainer" containerID="5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99" Sep 29 15:12:34 crc kubenswrapper[4634]: I0929 15:12:34.326368 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xxphc_must-gather-htqjd_4472a8eb-a763-4645-92df-233d8d34eb71/gather/0.log" Sep 29 15:12:40 crc kubenswrapper[4634]: I0929 15:12:40.124628 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:12:40 crc kubenswrapper[4634]: E0929 15:12:40.125669 4634 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-k9jf4_openshift-machine-config-operator(9173d45a-da12-4090-92c3-65ad4dcec715)\"" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" 
podUID="9173d45a-da12-4090-92c3-65ad4dcec715" Sep 29 15:12:48 crc kubenswrapper[4634]: I0929 15:12:48.350145 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xxphc/must-gather-htqjd"] Sep 29 15:12:48 crc kubenswrapper[4634]: I0929 15:12:48.351238 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-xxphc/must-gather-htqjd" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" containerName="copy" containerID="cri-o://93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768" gracePeriod=2 Sep 29 15:12:48 crc kubenswrapper[4634]: I0929 15:12:48.366524 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xxphc/must-gather-htqjd"] Sep 29 15:12:48 crc kubenswrapper[4634]: I0929 15:12:48.909233 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xxphc_must-gather-htqjd_4472a8eb-a763-4645-92df-233d8d34eb71/copy/0.log" Sep 29 15:12:48 crc kubenswrapper[4634]: I0929 15:12:48.910316 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:12:48 crc kubenswrapper[4634]: I0929 15:12:48.990759 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4472a8eb-a763-4645-92df-233d8d34eb71-must-gather-output\") pod \"4472a8eb-a763-4645-92df-233d8d34eb71\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " Sep 29 15:12:48 crc kubenswrapper[4634]: I0929 15:12:48.990947 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52bvj\" (UniqueName: \"kubernetes.io/projected/4472a8eb-a763-4645-92df-233d8d34eb71-kube-api-access-52bvj\") pod \"4472a8eb-a763-4645-92df-233d8d34eb71\" (UID: \"4472a8eb-a763-4645-92df-233d8d34eb71\") " Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.010812 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4472a8eb-a763-4645-92df-233d8d34eb71-kube-api-access-52bvj" (OuterVolumeSpecName: "kube-api-access-52bvj") pod "4472a8eb-a763-4645-92df-233d8d34eb71" (UID: "4472a8eb-a763-4645-92df-233d8d34eb71"). InnerVolumeSpecName "kube-api-access-52bvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.094280 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52bvj\" (UniqueName: \"kubernetes.io/projected/4472a8eb-a763-4645-92df-233d8d34eb71-kube-api-access-52bvj\") on node \"crc\" DevicePath \"\"" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.206252 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4472a8eb-a763-4645-92df-233d8d34eb71-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "4472a8eb-a763-4645-92df-233d8d34eb71" (UID: "4472a8eb-a763-4645-92df-233d8d34eb71"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.298353 4634 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4472a8eb-a763-4645-92df-233d8d34eb71-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.493498 4634 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-xxphc_must-gather-htqjd_4472a8eb-a763-4645-92df-233d8d34eb71/copy/0.log" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.495515 4634 generic.go:334] "Generic (PLEG): container finished" podID="4472a8eb-a763-4645-92df-233d8d34eb71" containerID="93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768" exitCode=143 Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.495569 4634 scope.go:117] "RemoveContainer" containerID="93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.495684 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xxphc/must-gather-htqjd" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.544997 4634 scope.go:117] "RemoveContainer" containerID="5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.613999 4634 scope.go:117] "RemoveContainer" containerID="93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768" Sep 29 15:12:49 crc kubenswrapper[4634]: E0929 15:12:49.614610 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768\": container with ID starting with 93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768 not found: ID does not exist" containerID="93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.614647 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768"} err="failed to get container status \"93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768\": rpc error: code = NotFound desc = could not find container \"93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768\": container with ID starting with 93bfaff8fa07db01b81169e9efa4a2ed31b58cbca26674ca6955e7ce6a8d5768 not found: ID does not exist" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.614669 4634 scope.go:117] "RemoveContainer" containerID="5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99" Sep 29 15:12:49 crc kubenswrapper[4634]: E0929 15:12:49.615234 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99\": container with ID starting with 5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99 not found: ID does not exist" containerID="5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99" Sep 29 15:12:49 crc kubenswrapper[4634]: I0929 15:12:49.615259 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99"} err="failed to get container status 
\"5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99\": rpc error: code = NotFound desc = could not find container \"5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99\": container with ID starting with 5a54ff5949e0be6586de722c4ccfed50bc1317c47ede2ea321be9acf020a3e99 not found: ID does not exist" Sep 29 15:12:50 crc kubenswrapper[4634]: I0929 15:12:50.132728 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" path="/var/lib/kubelet/pods/4472a8eb-a763-4645-92df-233d8d34eb71/volumes" Sep 29 15:12:51 crc kubenswrapper[4634]: I0929 15:12:51.111691 4634 scope.go:117] "RemoveContainer" containerID="a88715c8598de87204595ef96928edb61caca0601e32f71a4816a52e2d3adc2a" Sep 29 15:12:52 crc kubenswrapper[4634]: I0929 15:12:52.534754 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-k9jf4" event={"ID":"9173d45a-da12-4090-92c3-65ad4dcec715","Type":"ContainerStarted","Data":"e1495bc953fb1b0354dffdb5311492067f24da27b1110b395f2d3260f5eceae1"} Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.912056 4634 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7skjs"] Sep 29 15:13:16 crc kubenswrapper[4634]: E0929 15:13:16.914460 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="extract-utilities" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.914547 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="extract-utilities" Sep 29 15:13:16 crc kubenswrapper[4634]: E0929 15:13:16.914608 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="extract-content" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.914661 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="extract-content" Sep 29 15:13:16 crc kubenswrapper[4634]: E0929 15:13:16.914725 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" containerName="gather" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.914777 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" containerName="gather" Sep 29 15:13:16 crc kubenswrapper[4634]: E0929 15:13:16.914842 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="registry-server" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.914897 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="registry-server" Sep 29 15:13:16 crc kubenswrapper[4634]: E0929 15:13:16.914959 4634 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" containerName="copy" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.915009 4634 state_mem.go:107] "Deleted CPUSet assignment" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" containerName="copy" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.915290 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" containerName="copy" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.915362 4634 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="080a2f14-c349-4a3d-9ece-fd87672d7c3f" containerName="registry-server" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.915432 4634 memory_manager.go:354] "RemoveStaleState removing state" podUID="4472a8eb-a763-4645-92df-233d8d34eb71" containerName="gather" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.917961 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:16 crc kubenswrapper[4634]: I0929 15:13:16.935208 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7skjs"] Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.019033 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-catalog-content\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.019278 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hcpb\" (UniqueName: \"kubernetes.io/projected/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-kube-api-access-4hcpb\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.019312 4634 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-utilities\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.120782 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hcpb\" (UniqueName: \"kubernetes.io/projected/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-kube-api-access-4hcpb\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.121142 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-utilities\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.121512 4634 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-catalog-content\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.122575 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-utilities\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.122683 4634 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-catalog-content\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.147935 4634 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hcpb\" (UniqueName: \"kubernetes.io/projected/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-kube-api-access-4hcpb\") pod \"community-operators-7skjs\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.283373 4634 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:17 crc kubenswrapper[4634]: I0929 15:13:17.919843 4634 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7skjs"] Sep 29 15:13:18 crc kubenswrapper[4634]: I0929 15:13:18.830166 4634 generic.go:334] "Generic (PLEG): container finished" podID="a2f64cf9-a3b3-4552-b726-0a81eb3e4137" containerID="97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e" exitCode=0 Sep 29 15:13:18 crc kubenswrapper[4634]: I0929 15:13:18.830381 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7skjs" event={"ID":"a2f64cf9-a3b3-4552-b726-0a81eb3e4137","Type":"ContainerDied","Data":"97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e"} Sep 29 15:13:18 crc kubenswrapper[4634]: I0929 15:13:18.831052 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7skjs" event={"ID":"a2f64cf9-a3b3-4552-b726-0a81eb3e4137","Type":"ContainerStarted","Data":"226c5f0566c035bc941bdc16224d2e9cd60e459b9392f9053b6474a7c6ea1659"} Sep 29 15:13:18 crc kubenswrapper[4634]: I0929 15:13:18.832902 4634 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 15:13:20 crc kubenswrapper[4634]: I0929 15:13:20.855699 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7skjs" event={"ID":"a2f64cf9-a3b3-4552-b726-0a81eb3e4137","Type":"ContainerStarted","Data":"119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd"} Sep 29 15:13:22 crc kubenswrapper[4634]: I0929 15:13:22.878959 4634 generic.go:334] "Generic (PLEG): container finished" podID="a2f64cf9-a3b3-4552-b726-0a81eb3e4137" containerID="119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd" exitCode=0 Sep 29 15:13:22 crc kubenswrapper[4634]: I0929 15:13:22.879013 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7skjs" event={"ID":"a2f64cf9-a3b3-4552-b726-0a81eb3e4137","Type":"ContainerDied","Data":"119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd"} Sep 29 15:13:23 crc kubenswrapper[4634]: I0929 15:13:23.892191 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7skjs" event={"ID":"a2f64cf9-a3b3-4552-b726-0a81eb3e4137","Type":"ContainerStarted","Data":"f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa"} Sep 29 15:13:23 crc kubenswrapper[4634]: I0929 15:13:23.930692 4634 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7skjs" 
podStartSLOduration=3.320209015 podStartE2EDuration="7.930664076s" podCreationTimestamp="2025-09-29 15:13:16 +0000 UTC" firstStartedPulling="2025-09-29 15:13:18.832521339 +0000 UTC m=+5329.401249108" lastFinishedPulling="2025-09-29 15:13:23.44297641 +0000 UTC m=+5334.011704169" observedRunningTime="2025-09-29 15:13:23.917016029 +0000 UTC m=+5334.485743818" watchObservedRunningTime="2025-09-29 15:13:23.930664076 +0000 UTC m=+5334.499391865" Sep 29 15:13:27 crc kubenswrapper[4634]: I0929 15:13:27.285027 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:27 crc kubenswrapper[4634]: I0929 15:13:27.285686 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:27 crc kubenswrapper[4634]: I0929 15:13:27.375455 4634 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:37 crc kubenswrapper[4634]: I0929 15:13:37.344195 4634 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:37 crc kubenswrapper[4634]: I0929 15:13:37.415954 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7skjs"] Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.027798 4634 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7skjs" podUID="a2f64cf9-a3b3-4552-b726-0a81eb3e4137" containerName="registry-server" containerID="cri-o://f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa" gracePeriod=2 Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.520379 4634 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.684780 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hcpb\" (UniqueName: \"kubernetes.io/projected/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-kube-api-access-4hcpb\") pod \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.684954 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-utilities\") pod \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.685109 4634 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-catalog-content\") pod \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\" (UID: \"a2f64cf9-a3b3-4552-b726-0a81eb3e4137\") " Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.702282 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-kube-api-access-4hcpb" (OuterVolumeSpecName: "kube-api-access-4hcpb") pod "a2f64cf9-a3b3-4552-b726-0a81eb3e4137" (UID: "a2f64cf9-a3b3-4552-b726-0a81eb3e4137"). InnerVolumeSpecName "kube-api-access-4hcpb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.711078 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-utilities" (OuterVolumeSpecName: "utilities") pod "a2f64cf9-a3b3-4552-b726-0a81eb3e4137" (UID: "a2f64cf9-a3b3-4552-b726-0a81eb3e4137"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.744136 4634 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2f64cf9-a3b3-4552-b726-0a81eb3e4137" (UID: "a2f64cf9-a3b3-4552-b726-0a81eb3e4137"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.786597 4634 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.787194 4634 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hcpb\" (UniqueName: \"kubernetes.io/projected/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-kube-api-access-4hcpb\") on node \"crc\" DevicePath \"\"" Sep 29 15:13:38 crc kubenswrapper[4634]: I0929 15:13:38.787213 4634 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2f64cf9-a3b3-4552-b726-0a81eb3e4137-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.036829 4634 generic.go:334] "Generic (PLEG): container finished" podID="a2f64cf9-a3b3-4552-b726-0a81eb3e4137" containerID="f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa" exitCode=0 Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.036865 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7skjs" event={"ID":"a2f64cf9-a3b3-4552-b726-0a81eb3e4137","Type":"ContainerDied","Data":"f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa"} Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.036887 4634 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7skjs" event={"ID":"a2f64cf9-a3b3-4552-b726-0a81eb3e4137","Type":"ContainerDied","Data":"226c5f0566c035bc941bdc16224d2e9cd60e459b9392f9053b6474a7c6ea1659"} Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.036904 4634 scope.go:117] "RemoveContainer" containerID="f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.037009 4634 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7skjs" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.087679 4634 scope.go:117] "RemoveContainer" containerID="119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.087958 4634 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7skjs"] Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.104215 4634 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7skjs"] Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.131417 4634 scope.go:117] "RemoveContainer" containerID="97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.167942 4634 scope.go:117] "RemoveContainer" containerID="f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa" Sep 29 15:13:39 crc kubenswrapper[4634]: E0929 15:13:39.168442 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa\": container with ID starting with f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa not found: ID does not exist" containerID="f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.168486 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa"} err="failed to get container status \"f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa\": rpc error: code = NotFound desc = could not find container \"f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa\": container with ID starting with f8ec4a8f7760666c904cb402cbf617e00373cbc3f656a8d7dacf5469a1ef2baa not found: ID does not exist" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.168513 4634 scope.go:117] "RemoveContainer" containerID="119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd" Sep 29 15:13:39 crc kubenswrapper[4634]: E0929 15:13:39.169066 4634 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd\": container with ID starting with 119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd not found: ID does not exist" containerID="119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.169184 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd"} err="failed to get container status \"119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd\": rpc error: code = NotFound desc = could not find container \"119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd\": container with ID starting with 119b34ac16aae303f7c2e2c08f58b5f8d52c8324c859ef2dbce7b44f066013bd not found: ID does not exist" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.169790 4634 scope.go:117] "RemoveContainer" containerID="97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e" Sep 29 15:13:39 crc kubenswrapper[4634]: E0929 15:13:39.170027 4634 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e\": container with ID starting with 97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e not found: ID does not exist" containerID="97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e" Sep 29 15:13:39 crc kubenswrapper[4634]: I0929 15:13:39.170054 4634 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e"} err="failed to get container status \"97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e\": rpc error: code = NotFound desc = could not find container \"97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e\": container with ID starting with 97a611493d45a798948cc3763275240a3415e55ec51abac4e07d73398ce1ad5e not found: ID does not exist" Sep 29 15:13:40 crc kubenswrapper[4634]: I0929 15:13:40.125639 4634 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2f64cf9-a3b3-4552-b726-0a81eb3e4137" path="/var/lib/kubelet/pods/a2f64cf9-a3b3-4552-b726-0a81eb3e4137/volumes" Sep 29 15:13:55 crc kubenswrapper[4634]: I0929 15:13:55.169127 4634 scope.go:117] "RemoveContainer" containerID="e53f474ceb78e929d351d8499895b51439d79632b088f70c0a15d9d7db94e159" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066521140024445 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066521141017363 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066506036016514 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066506036015464 5ustar corecore